file_name (string, 5-52 chars) | name (string, 4-95 chars) | original_source_type (string, 0-23k chars) | source_type (string, 9-23k chars) | source_definition (string, 9-57.9k chars) | source (dict) | source_range (dict) | file_context (string, 0-721k chars) | dependencies (dict) | opens_and_abbrevs (list, 2-94 items) | vconfig (dict) | interleaved (bool, 1 class) | verbose_type (string, 1-7.42k chars) | effect (string, 118 classes) | effect_flags (sequence, 0-2 items) | mutual_with (sequence, 0-11 items) | ideal_premises (sequence, 0-236 items) | proof_features (sequence, 0-1 items) | is_simple_lemma (bool, 2 classes) | is_div (bool, 2 classes) | is_proof (bool, 2 classes) | is_simply_typed (bool, 2 classes) | is_type (bool, 2 classes) | partial_definition (string, 5-3.99k chars) | completed_definiton (string, 1-1.63M chars) | isa_cross_project_example (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Vale.Def.Words.Four_s.fsti | Vale.Def.Words.Four_s.nat_to_four_unfold | val nat_to_four_unfold (size: nat) (n: natN (pow2 (4 * size))) : four (natN (pow2 size)) | val nat_to_four_unfold (size: nat) (n: natN (pow2 (4 * size))) : four (natN (pow2 size)) | let nat_to_four_unfold (size:nat) (n:natN (pow2 (4 * size))) : four (natN (pow2 size)) =
let n1 = pow2_norm size in
let n2 = pow2_norm (2 * size) in
let n3 = pow2_norm (3 * size) in
let n4 = pow2_norm (4 * size) in
Mkfour (n % n1) ((n / n1) % n1) ((n / n2) % n1) ((n / n3) % n1) | {
"file_name": "vale/specs/defs/Vale.Def.Words.Four_s.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 65,
"end_line": 28,
"start_col": 0,
"start_line": 23
} | module Vale.Def.Words.Four_s
open Vale.Def.Words_s
open FStar.Mul
unfold let four_map (#a #b:Type) (f:a -> b) (x:four a) : four b =
let Mkfour x0 x1 x2 x3 = x in
Mkfour (f x0) (f x1) (f x2) (f x3)
unfold let four_map2 (#a #b:Type) (f:a -> a -> b) (x y:four a) : four b =
let Mkfour x0 x1 x2 x3 = x in
let Mkfour y0 y1 y2 y3 = y in
Mkfour (f x0 y0) (f x1 y1) (f x2 y2) (f x3 y3)
let two_two_to_four (#a:Type) (x:two (two a)) : four a =
let (Mktwo (Mktwo x0 x1) (Mktwo x2 x3)) = x in
Mkfour x0 x1 x2 x3
let four_to_two_two (#a:Type) (x:four a) : two (two a) =
let Mkfour x0 x1 x2 x3 = x in
Mktwo (Mktwo x0 x1) (Mktwo x2 x3) | {
"checked_file": "/",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Def.Words.Four_s.fsti"
} | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | size: Prims.nat -> n: Vale.Def.Words_s.natN (Prims.pow2 (4 * size))
-> Vale.Def.Words_s.four (Vale.Def.Words_s.natN (Prims.pow2 size)) | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"Vale.Def.Words_s.natN",
"Prims.pow2",
"FStar.Mul.op_Star",
"Vale.Def.Words_s.Mkfour",
"Prims.op_Modulus",
"Prims.op_Division",
"Prims.pos",
"Vale.Def.Words_s.pow2_norm",
"Vale.Def.Words_s.four"
] | [] | false | false | false | false | false | let nat_to_four_unfold (size: nat) (n: natN (pow2 (4 * size))) : four (natN (pow2 size)) =
| let n1 = pow2_norm size in
let n2 = pow2_norm (2 * size) in
let n3 = pow2_norm (3 * size) in
let n4 = pow2_norm (4 * size) in
Mkfour (n % n1) ((n / n1) % n1) ((n / n2) % n1) ((n / n3) % n1) | false |
Vale.AES.GCM_helpers.fsti | Vale.AES.GCM_helpers.bytes_to_quad_size | val bytes_to_quad_size : num_bytes: Prims.nat -> Prims.int | let bytes_to_quad_size (num_bytes:nat) =
((num_bytes + 15) / 16) | {
"file_name": "vale/code/crypto/aes/Vale.AES.GCM_helpers.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 25,
"end_line": 16,
"start_col": 0,
"start_line": 15
} | module Vale.AES.GCM_helpers
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Words.Four_s
open Vale.Def.Types_s
open Vale.Arch.Types
open FStar.Mul
open FStar.Seq
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open FStar.Math.Lemmas
open Vale.Lib.Seqs | {
"checked_file": "/",
"dependencies": [
"Vale.Lib.Seqs.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Words.Four_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.AES_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked"
],
"interface_file": false,
"source_file": "Vale.AES.GCM_helpers.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Lib.Seqs",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Four_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | num_bytes: Prims.nat -> Prims.int | Prims.Tot | [
"total"
] | [] | [
"Prims.nat",
"Prims.op_Division",
"Prims.op_Addition",
"Prims.int"
] | [] | false | false | false | true | false | let bytes_to_quad_size (num_bytes: nat) =
| ((num_bytes + 15) / 16) | false |
|
FStar.Math.Euclid.fst | FStar.Math.Euclid.is_gcd_prime | val is_gcd_prime (p:int{is_prime p}) (a:pos{a < p}) : Lemma (is_gcd p a 1) | val is_gcd_prime (p:int{is_prime p}) (a:pos{a < p}) : Lemma (is_gcd p a 1) | let is_gcd_prime p a =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
Classical.forall_intro (Classical.move_requires (is_gcd_prime_aux p a));
assert (forall x. x `divides` p /\ x `divides` a ==> x = 1 \/ x = -1 /\ x `divides` 1) | {
"file_name": "ulib/FStar.Math.Euclid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 88,
"end_line": 208,
"start_col": 0,
"start_line": 205
} | module FStar.Math.Euclid
open FStar.Mul
open FStar.Math.Lemmas
///
/// Auxiliary lemmas
///
val eq_mult_left (a b:int) : Lemma (requires a = b * a) (ensures a = 0 \/ b = 1)
let eq_mult_left a b = ()
val eq_mult_one (a b:int) : Lemma
(requires a * b = 1)
(ensures (a = 1 /\ b = 1) \/ (a = -1 /\ b = -1))
let eq_mult_one a b = ()
val opp_idempotent (a:int) : Lemma (-(-a) == a)
let opp_idempotent a = ()
val add_sub_l (a b:int) : Lemma (a - b + b = a)
let add_sub_l a b = ()
val add_sub_r (a b:int) : Lemma (a + b - b = a)
let add_sub_r a b = ()
///
/// Divides relation
///
let divides_reflexive a =
Classical.exists_intro (fun q -> a = q * a) 1
let divides_transitive a b c =
eliminate exists q1. b == q1 * a
returns a `divides` c
with _pf.
eliminate exists q2. c == q2 * b
returns _
with _pf2.
introduce exists q. c == q * a
with (q1 * q2)
and ()
let divide_antisym a b =
if a <> 0 then
Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q1. b = q1 * a))
(fun q1 ->
Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q2. a = q2 * b))
(fun q2 ->
assert (b = q1 * a);
assert (a = q2 * b);
assert (b = q1 * (q2 * b));
paren_mul_right q1 q2 b;
eq_mult_left b (q1 * q2);
eq_mult_one q1 q2))
let divides_0 a =
Classical.exists_intro (fun q -> 0 = q * a) 0
let divides_1 a = ()
let divides_minus a b =
Classical.exists_elim (a `divides` (-b))
(Squash.get_proof (a `divides` b))
(fun q -> Classical.exists_intro (fun q' -> -b = q' * a) (-q))
let divides_opp a b =
Classical.exists_elim ((-a) `divides` b)
(Squash.get_proof (a `divides` b))
(fun q -> Classical.exists_intro (fun q' -> b = q' * (-a)) (-q))
let divides_plus a b d =
Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q1. a = q1 * d))
(fun q1 ->
Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q2. b = q2 * d))
(fun q2 ->
assert (a + b = q1 * d + q2 * d);
distributivity_add_left q1 q2 d;
Classical.exists_intro (fun q -> a + b = q * d) (q1 + q2)))
let divides_sub a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
divides_plus a (-b) d
let divides_mult_right a b d =
Classical.exists_elim (d `divides` (a * b)) (Squash.get_proof (d `divides` b))
(fun q ->
paren_mul_right a q d;
Classical.exists_intro (fun r -> a * b = r * d) (a * q))
///
/// GCD
///
let mod_divides a b =
Classical.exists_intro (fun q -> a = q * b) (a / b)
let divides_mod a b =
Classical.exists_elim (a % b = 0) (Squash.get_proof (b `divides` a))
(fun q -> cancel_mul_div q b)
let is_gcd_unique a b c d =
divide_antisym c d
let is_gcd_reflexive a = ()
let is_gcd_symmetric a b d = ()
let is_gcd_0 a = ()
let is_gcd_1 a = ()
let is_gcd_minus a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
opp_idempotent b
let is_gcd_opp a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
divides_opp d a;
divides_opp d b
let is_gcd_plus a b q d =
add_sub_r b (q * a);
Classical.forall_intro_3 (Classical.move_requires_3 divides_plus);
Classical.forall_intro_3 (Classical.move_requires_3 divides_mult_right);
Classical.forall_intro_3 (Classical.move_requires_3 divides_sub)
///
/// Extended Euclidean algorithm
///
val is_gcd_for_euclid (a b q d:int) : Lemma
(requires is_gcd b (a - q * b) d)
(ensures is_gcd a b d)
let is_gcd_for_euclid a b q d =
add_sub_l a (q * b);
is_gcd_plus b (a - q * b) q d
val egcd (a b u1 u2 u3 v1 v2 v3:int) : Pure (int & int & int)
(requires v3 >= 0 /\
u1 * a + u2 * b = u3 /\
v1 * a + v2 * b = v3 /\
(forall d. is_gcd u3 v3 d ==> is_gcd a b d))
(ensures (fun (u, v, d) -> u * a + v * b = d /\ is_gcd a b d))
(decreases v3)
let rec egcd a b u1 u2 u3 v1 v2 v3 =
if v3 = 0 then
begin
divides_0 u3;
(u1, u2, u3)
end
else
begin
let q = u3 / v3 in
euclidean_division_definition u3 v3;
assert (u3 - q * v3 = (q * v3 + u3 % v3) - q * v3);
assert (q * v3 - q * v3 = 0);
swap_add_plus_minus (q * v3) (u3 % v3) (q * v3);
calc (==) {
(u1 - q * v1) * a + (u2 - q * v2) * b;
== { _ by (FStar.Tactics.Canon.canon()) }
(u1 * a + u2 * b) - q * (v1 * a + v2 * b);
== { }
u3 - q * v3;
== { lemma_div_mod u3 v3 }
u3 % v3;
};
let u1, v1 = v1, u1 - q * v1 in
let u2, v2 = v2, u2 - q * v2 in
let u3' = u3 in
let v3' = v3 in
let u3, v3 = v3, u3 - q * v3 in
(* proving the implication in the precondition *)
introduce forall d. is_gcd v3' (u3' - q * v3') d ==> is_gcd u3' v3' d with
introduce _ ==> _ with _.
is_gcd_for_euclid u3' v3' q d;
let r = egcd a b u1 u2 u3 v1 v2 v3 in
r
end
let euclid_gcd a b =
if b >= 0 then
egcd a b 1 0 a 0 1 b
else (
introduce forall d. is_gcd a (-b) d ==> is_gcd a b d
with introduce _ ==> _
with _pf.
(is_gcd_minus a b d;
is_gcd_symmetric b a d);
let res = egcd a b 1 0 a 0 (-1) (-b) in
let _, _, d = res in
assert (is_gcd a b d);
res
)
val is_gcd_prime_aux (p:int) (a:pos{a < p}) (d:int) : Lemma
(requires is_prime p /\ d `divides` p /\ d `divides` a)
(ensures d = 1 \/ d = -1)
let is_gcd_prime_aux p a d = () | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.Canon.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.Sugar.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "FStar.Math.Euclid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: Prims.int{FStar.Math.Euclid.is_prime p} -> a: Prims.pos{a < p}
-> FStar.Pervasives.Lemma (ensures FStar.Math.Euclid.is_gcd p a 1) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.int",
"FStar.Math.Euclid.is_prime",
"Prims.pos",
"Prims.b2t",
"Prims.op_LessThan",
"Prims._assert",
"Prims.l_Forall",
"Prims.l_imp",
"Prims.l_and",
"FStar.Math.Euclid.divides",
"Prims.l_or",
"Prims.op_Equality",
"Prims.op_Minus",
"Prims.unit",
"FStar.Classical.forall_intro",
"FStar.Classical.move_requires",
"FStar.Math.Euclid.is_gcd_prime_aux",
"FStar.Classical.forall_intro_2",
"FStar.Classical.move_requires_2",
"FStar.Math.Euclid.divides_minus"
] | [] | false | false | true | false | false | let is_gcd_prime p a =
| Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
Classical.forall_intro (Classical.move_requires (is_gcd_prime_aux p a));
assert (forall x. x `divides` p /\ x `divides` a ==> x = 1 \/ x = - 1 /\ x `divides` 1) | false |
Vale.Curve25519.FastMul_helpers.fsti | Vale.Curve25519.FastMul_helpers.int_canon | val int_canon : _: _ -> FStar.Tactics.Effect.Tac Prims.unit | let int_canon = fun _ -> norm [delta; zeta; iota]; int_semiring () | {
"file_name": "vale/code/crypto/ecc/curve25519/Vale.Curve25519.FastMul_helpers.fsti",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 66,
"end_line": 11,
"start_col": 0,
"start_line": 11
} | module Vale.Curve25519.FastMul_helpers
open Vale.Def.Words_s
open Vale.Def.Types_s
open FStar.Mul
open FStar.Tactics
open FStar.Tactics.CanonCommSemiring
open Vale.Curve25519.Fast_defs | {
"checked_file": "/",
"dependencies": [
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"prims.fst.checked",
"FStar.Tactics.CanonCommSemiring.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Curve25519.FastMul_helpers.fsti"
} | [
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics.CanonCommSemiring",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: _ -> FStar.Tactics.Effect.Tac Prims.unit | FStar.Tactics.Effect.Tac | [] | [] | [
"FStar.Tactics.CanonCommSemiring.int_semiring",
"Prims.unit",
"FStar.Stubs.Tactics.V1.Builtins.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.delta",
"FStar.Pervasives.zeta",
"FStar.Pervasives.iota",
"Prims.Nil"
] | [] | false | true | false | false | false | let int_canon =
| fun _ ->
norm [delta; zeta; iota];
int_semiring () | false |
|
Hacl.K256.PrecompTable.fst | Hacl.K256.PrecompTable.precomp_g_pow2_192_table_w4 | val precomp_g_pow2_192_table_w4:
x:glbuffer uint64 240ul{witnessed x precomp_g_pow2_192_table_lseq_w4 /\ recallable x} | val precomp_g_pow2_192_table_w4:
x:glbuffer uint64 240ul{witnessed x precomp_g_pow2_192_table_lseq_w4 /\ recallable x} | let precomp_g_pow2_192_table_w4:
x:glbuffer uint64 240ul{witnessed x precomp_g_pow2_192_table_lseq_w4 /\ recallable x} =
createL_global precomp_g_pow2_192_table_list_w4 | {
"file_name": "code/k256/Hacl.K256.PrecompTable.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 49,
"end_line": 276,
"start_col": 0,
"start_line": 274
} | module Hacl.K256.PrecompTable
open FStar.HyperStack
open FStar.HyperStack.ST
open FStar.Mul
open Lib.IntTypes
open Lib.Buffer
module ST = FStar.HyperStack.ST
module LSeq = Lib.Sequence
module LE = Lib.Exponentiation
module SE = Spec.Exponentiation
module SPT = Hacl.Spec.PrecompBaseTable
module SPT256 = Hacl.Spec.PrecompBaseTable256
module SPTK = Hacl.Spec.K256.PrecompTable
module S = Spec.K256
module SL = Spec.K256.Lemmas
open Hacl.Impl.K256.Point
include Hacl.Impl.K256.Group
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
let proj_point_to_list p =
SPTK.proj_point_to_list_lemma p;
SPTK.proj_point_to_list p
let lemma_refl x =
SPTK.proj_point_to_list_lemma x
//-----------------
inline_for_extraction noextract
let proj_g_pow2_64 : S.proj_point =
[@inline_let]
let rX : S.felem = 0x46ec0aa60b0b98c37b29371784676ad967b7beb1a941ddb6fbbff95b44cb788b in
[@inline_let]
let rY : S.felem = 0x6b946755bbc6b677576579c990a1ccf14a710545251a1428fabbf02f40268e63 in
[@inline_let]
let rZ : S.felem = 0x3c114b2ac17c199ec9eba9f7cc64dc459ca2e53f5bbead2b4e618b318ffcc00e in
(rX, rY, rZ)
val lemma_proj_g_pow2_64_eval : unit ->
Lemma (SE.exp_pow2 S.mk_k256_concrete_ops S.g 64 == proj_g_pow2_64)
let lemma_proj_g_pow2_64_eval () =
SPT256.exp_pow2_rec_is_exp_pow2 S.mk_k256_concrete_ops S.g 64;
let qX, qY, qZ = normalize_term (SPT256.exp_pow2_rec S.mk_k256_concrete_ops S.g 64) in
normalize_term_spec (SPT256.exp_pow2_rec S.mk_k256_concrete_ops S.g 64);
let rX : S.felem = 0x46ec0aa60b0b98c37b29371784676ad967b7beb1a941ddb6fbbff95b44cb788b in
let rY : S.felem = 0x6b946755bbc6b677576579c990a1ccf14a710545251a1428fabbf02f40268e63 in
let rZ : S.felem = 0x3c114b2ac17c199ec9eba9f7cc64dc459ca2e53f5bbead2b4e618b318ffcc00e in
assert_norm (qX == rX /\ qY == rY /\ qZ == rZ)
inline_for_extraction noextract
let proj_g_pow2_128 : S.proj_point =
[@inline_let]
let rX : S.felem = 0x98299efbc8e459915404ae015b48cac3b929e0158665f3c7fa5489fbd25c4297 in
[@inline_let]
let rY : S.felem = 0xf1e5cbc9579e7d11a31681e947c2959ae0298a006d1c06ab1ad93d6716ea50cc in
[@inline_let]
let rZ : S.felem = 0x5c53ffe15001674a2eacb60c9327a8b0ddbd93a0fa6d90309de6cc124133938b in
(rX, rY, rZ)
val lemma_proj_g_pow2_128_eval : unit ->
Lemma (SE.exp_pow2 S.mk_k256_concrete_ops proj_g_pow2_64 64 == proj_g_pow2_128)
let lemma_proj_g_pow2_128_eval () =
SPT256.exp_pow2_rec_is_exp_pow2 S.mk_k256_concrete_ops proj_g_pow2_64 64;
let qX, qY, qZ = normalize_term (SPT256.exp_pow2_rec S.mk_k256_concrete_ops proj_g_pow2_64 64) in
normalize_term_spec (SPT256.exp_pow2_rec S.mk_k256_concrete_ops proj_g_pow2_64 64);
let rX : S.felem = 0x98299efbc8e459915404ae015b48cac3b929e0158665f3c7fa5489fbd25c4297 in
let rY : S.felem = 0xf1e5cbc9579e7d11a31681e947c2959ae0298a006d1c06ab1ad93d6716ea50cc in
let rZ : S.felem = 0x5c53ffe15001674a2eacb60c9327a8b0ddbd93a0fa6d90309de6cc124133938b in
assert_norm (qX == rX /\ qY == rY /\ qZ == rZ)
inline_for_extraction noextract
let proj_g_pow2_192 : S.proj_point =
[@inline_let]
let rX : S.felem = 0xbd382b67d20492b1480ca58a6d7d617ba413a9bc7c2f1cff51301ef960fb245c in
[@inline_let]
let rY : S.felem = 0x0b232afcf692673aa714357c524c07867a64ea3b9dfab53f0e74622159e86b0d in
[@inline_let]
let rZ : S.felem = 0x028a1380449aede5df8219420b458d464a6a4773ac91e8305237878cef1cffa6 in
(rX, rY, rZ)
val lemma_proj_g_pow2_192_eval : unit ->
Lemma (SE.exp_pow2 S.mk_k256_concrete_ops proj_g_pow2_128 64 == proj_g_pow2_192)
let lemma_proj_g_pow2_192_eval () =
SPT256.exp_pow2_rec_is_exp_pow2 S.mk_k256_concrete_ops proj_g_pow2_128 64;
let qX, qY, qZ = normalize_term (SPT256.exp_pow2_rec S.mk_k256_concrete_ops proj_g_pow2_128 64) in
normalize_term_spec (SPT256.exp_pow2_rec S.mk_k256_concrete_ops proj_g_pow2_128 64);
let rX : S.felem = 0xbd382b67d20492b1480ca58a6d7d617ba413a9bc7c2f1cff51301ef960fb245c in
let rY : S.felem = 0x0b232afcf692673aa714357c524c07867a64ea3b9dfab53f0e74622159e86b0d in
let rZ : S.felem = 0x028a1380449aede5df8219420b458d464a6a4773ac91e8305237878cef1cffa6 in
assert_norm (qX == rX /\ qY == rY /\ qZ == rZ)
// let proj_g_pow2_64 : S.proj_point =
// normalize_term (SPT256.exp_pow2_rec S.mk_k256_concrete_ops S.g 64)
// let proj_g_pow2_128 : S.proj_point =
// normalize_term (SPT256.exp_pow2_rec S.mk_k256_concrete_ops proj_g_pow2_64 64)
// let proj_g_pow2_192 : S.proj_point =
// normalize_term (SPT256.exp_pow2_rec S.mk_k256_concrete_ops proj_g_pow2_128 64)
inline_for_extraction noextract
let proj_g_pow2_64_list : SPTK.point_list =
normalize_term (SPTK.proj_point_to_list proj_g_pow2_64)
inline_for_extraction noextract
let proj_g_pow2_128_list : SPTK.point_list =
normalize_term (SPTK.proj_point_to_list proj_g_pow2_128)
inline_for_extraction noextract
let proj_g_pow2_192_list : SPTK.point_list =
normalize_term (SPTK.proj_point_to_list proj_g_pow2_192)
let proj_g_pow2_64_lseq : LSeq.lseq uint64 15 =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_64);
Seq.seq_of_list proj_g_pow2_64_list
let proj_g_pow2_128_lseq : LSeq.lseq uint64 15 =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_128);
Seq.seq_of_list proj_g_pow2_128_list
let proj_g_pow2_192_lseq : LSeq.lseq uint64 15 =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_192);
Seq.seq_of_list proj_g_pow2_192_list
val proj_g_pow2_64_lemma: unit ->
Lemma (S.to_aff_point proj_g_pow2_64 == pow_point (pow2 64) g_aff)
let proj_g_pow2_64_lemma () =
lemma_proj_g_pow2_64_eval ();
SPT256.a_pow2_64_lemma S.mk_k256_concrete_ops S.g
val proj_g_pow2_128_lemma: unit ->
Lemma (S.to_aff_point proj_g_pow2_128 == pow_point (pow2 128) g_aff)
let proj_g_pow2_128_lemma () =
lemma_proj_g_pow2_128_eval ();
lemma_proj_g_pow2_64_eval ();
SPT256.a_pow2_128_lemma S.mk_k256_concrete_ops S.g
val proj_g_pow2_192_lemma: unit ->
Lemma (S.to_aff_point proj_g_pow2_192 == pow_point (pow2 192) g_aff)
let proj_g_pow2_192_lemma () =
lemma_proj_g_pow2_192_eval ();
lemma_proj_g_pow2_128_eval ();
lemma_proj_g_pow2_64_eval ();
SPT256.a_pow2_192_lemma S.mk_k256_concrete_ops S.g
let proj_g_pow2_64_lseq_lemma () =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_64);
proj_g_pow2_64_lemma ();
SPTK.proj_point_to_list_lemma proj_g_pow2_64
let proj_g_pow2_128_lseq_lemma () =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_128);
proj_g_pow2_128_lemma ();
SPTK.proj_point_to_list_lemma proj_g_pow2_128
let proj_g_pow2_192_lseq_lemma () =
normalize_term_spec (SPTK.proj_point_to_list proj_g_pow2_192);
proj_g_pow2_192_lemma ();
SPTK.proj_point_to_list_lemma proj_g_pow2_192
let mk_proj_g_pow2_64 () =
createL proj_g_pow2_64_list
let mk_proj_g_pow2_128 () =
createL proj_g_pow2_128_list
let mk_proj_g_pow2_192 () =
createL proj_g_pow2_192_list
//----------------
/// window size = 4; precomputed table = [[0]G, [1]G, ..., [15]G]
inline_for_extraction noextract
let precomp_basepoint_table_list_w4: x:list uint64{FStar.List.Tot.length x = 240} =
normalize_term (SPT.precomp_base_table_list mk_k256_precomp_base_table S.g 15)
let precomp_basepoint_table_lseq_w4 : LSeq.lseq uint64 240 =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table S.g 15);
Seq.seq_of_list precomp_basepoint_table_list_w4
let precomp_basepoint_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table S.g 15);
SPT.precomp_base_table_lemma mk_k256_precomp_base_table S.g 16 precomp_basepoint_table_lseq_w4
let precomp_basepoint_table_w4:
x:glbuffer uint64 240ul{witnessed x precomp_basepoint_table_lseq_w4 /\ recallable x} =
createL_global precomp_basepoint_table_list_w4
/// window size = 4; precomputed table = [[0]([pow2 64]G), [1]([pow2 64]G), ..., [15]([pow2 64]G)]
inline_for_extraction noextract
let precomp_g_pow2_64_table_list_w4: x:list uint64{FStar.List.Tot.length x = 240} =
normalize_term (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_64 15)
let precomp_g_pow2_64_table_lseq_w4 : LSeq.lseq uint64 240 =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_64 15);
Seq.seq_of_list precomp_g_pow2_64_table_list_w4
let precomp_g_pow2_64_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_64 15);
SPT.precomp_base_table_lemma mk_k256_precomp_base_table
proj_g_pow2_64 16 precomp_g_pow2_64_table_lseq_w4;
proj_g_pow2_64_lemma ()
let precomp_g_pow2_64_table_w4:
x:glbuffer uint64 240ul{witnessed x precomp_g_pow2_64_table_lseq_w4 /\ recallable x} =
createL_global precomp_g_pow2_64_table_list_w4
/// window size = 4; precomputed table = [[0]([pow2 128]G), [1]([pow2 128]G),...,[15]([pow2 128]G)]
inline_for_extraction noextract
let precomp_g_pow2_128_table_list_w4: x:list uint64{FStar.List.Tot.length x = 240} =
normalize_term (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_128 15)
let precomp_g_pow2_128_table_lseq_w4 : LSeq.lseq uint64 240 =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_128 15);
Seq.seq_of_list precomp_g_pow2_128_table_list_w4
let precomp_g_pow2_128_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_128 15);
SPT.precomp_base_table_lemma mk_k256_precomp_base_table
proj_g_pow2_128 16 precomp_g_pow2_64_table_lseq_w4;
proj_g_pow2_128_lemma ()
let precomp_g_pow2_128_table_w4:
x:glbuffer uint64 240ul{witnessed x precomp_g_pow2_128_table_lseq_w4 /\ recallable x} =
createL_global precomp_g_pow2_128_table_list_w4
/// window size = 4; precomputed table = [[0]([pow2 192]G), [1]([pow2 192]G),...,[15]([pow2 192]G)]
inline_for_extraction noextract
let precomp_g_pow2_192_table_list_w4: x:list uint64{FStar.List.Tot.length x = 240} =
normalize_term (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_192 15)
let precomp_g_pow2_192_table_lseq_w4 : LSeq.lseq uint64 240 =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_192 15);
Seq.seq_of_list precomp_g_pow2_192_table_list_w4
let precomp_g_pow2_192_table_lemma_w4 () =
normalize_term_spec (SPT.precomp_base_table_list mk_k256_precomp_base_table proj_g_pow2_192 15);
SPT.precomp_base_table_lemma mk_k256_precomp_base_table
proj_g_pow2_192 16 precomp_g_pow2_64_table_lseq_w4;
proj_g_pow2_192_lemma () | {
"checked_file": "/",
"dependencies": [
"Spec.K256.Lemmas.fsti.checked",
"Spec.K256.fst.checked",
"Spec.Exponentiation.fsti.checked",
"prims.fst.checked",
"Lib.Sequence.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Lib.Exponentiation.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.PrecompBaseTable256.fsti.checked",
"Hacl.Spec.PrecompBaseTable.fsti.checked",
"Hacl.Spec.K256.PrecompTable.fsti.checked",
"Hacl.Impl.K256.Point.fsti.checked",
"Hacl.Impl.K256.Group.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": true,
"source_file": "Hacl.K256.PrecompTable.fst"
} | [
{
"abbrev": true,
"full_module": "Spec.K256.Lemmas",
"short_module": "SL"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.K256.PrecompTable",
"short_module": "SPTK"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable256",
"short_module": "SPT256"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation",
"short_module": "LE"
},
{
"abbrev": false,
"full_module": "Hacl.Impl.K256.Group",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl.K256.Point",
"short_module": null
},
{
"abbrev": true,
"full_module": "Spec.K256",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.PrecompBaseTable",
"short_module": "SPT"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.Exponentiation.Definitions",
"short_module": "BE"
},
{
"abbrev": true,
"full_module": "Spec.Exponentiation",
"short_module": "SE"
},
{
"abbrev": true,
"full_module": "Lib.Exponentiation.Definition",
"short_module": "LE"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.K256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x:
Lib.Buffer.glbuffer Lib.IntTypes.uint64 240ul
{ Lib.Buffer.witnessed x Hacl.K256.PrecompTable.precomp_g_pow2_192_table_lseq_w4 /\
Lib.Buffer.recallable x } | Prims.Tot | [
"total"
] | [] | [
"Lib.Buffer.createL_global",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"Hacl.K256.PrecompTable.precomp_g_pow2_192_table_list_w4",
"Lib.Buffer.glbuffer",
"Lib.IntTypes.size",
"FStar.Pervasives.normalize_term",
"Lib.IntTypes.size_nat",
"FStar.List.Tot.Base.length",
"Lib.IntTypes.uint64",
"FStar.UInt32.__uint_to_t",
"Prims.l_and",
"Lib.Buffer.witnessed",
"Hacl.K256.PrecompTable.precomp_g_pow2_192_table_lseq_w4",
"Lib.Buffer.recallable",
"Lib.Buffer.CONST"
] | [] | false | false | false | false | false | let precomp_g_pow2_192_table_w4:x:
glbuffer uint64 240ul {witnessed x precomp_g_pow2_192_table_lseq_w4 /\ recallable x} =
| createL_global precomp_g_pow2_192_table_list_w4 | false |
Hacl.Spec.P256.Montgomery.fst | Hacl.Spec.P256.Montgomery.qmont_cancel_lemma2 | val qmont_cancel_lemma2: a:S.qelem -> b:S.qelem ->
Lemma (to_qmont a * from_qmont b % S.order = a * b % S.order) | val qmont_cancel_lemma2: a:S.qelem -> b:S.qelem ->
Lemma (to_qmont a * from_qmont b % S.order = a * b % S.order) | let qmont_cancel_lemma2 a b =
calc (==) {
to_qmont a * from_qmont b % S.order;
(==) { }
(a * qmont_R % S.order * (b * qmont_R_inv % S.order)) % S.order;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (a * qmont_R % S.order) (b * qmont_R_inv) S.order }
(a * qmont_R % S.order * (b * qmont_R_inv)) % S.order;
(==) { Math.Lemmas.paren_mul_right (a * qmont_R % S.order) b qmont_R_inv }
(a * qmont_R % S.order * b * qmont_R_inv) % S.order;
(==) { qmont_cancel_lemma1 a b }
a * b % S.order;
} | {
"file_name": "code/ecdsap256/Hacl.Spec.P256.Montgomery.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 335,
"start_col": 0,
"start_line": 324
} | module Hacl.Spec.P256.Montgomery
open FStar.Mul
open Lib.IntTypes
module S = Spec.P256
module M = Lib.NatMod
module BD = Hacl.Spec.Bignum.Definitions
module SBM = Hacl.Spec.Bignum.Montgomery
module SBML = Hacl.Spec.Montgomery.Lemmas
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
/// Montgomery arithmetic for a base field
val lemma_abc_is_acb (a b c:nat) : Lemma (a * b * c = a * c * b)
let lemma_abc_is_acb a b c =
Math.Lemmas.paren_mul_right a b c;
Math.Lemmas.swap_mul b c;
Math.Lemmas.paren_mul_right a c b
val lemma_mod_mul_assoc (n:pos) (a b c:nat) : Lemma ((a * b % n) * c % n == (a * (b * c % n)) % n)
let lemma_mod_mul_assoc m a b c =
calc (==) {
(a * b % m) * c % m;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a * b) c m }
(a * b) * c % m;
(==) { Math.Lemmas.paren_mul_right a b c }
a * (b * c) % m;
(==) { Math.Lemmas.lemma_mod_mul_distr_r a (b * c) m }
a * (b * c % m) % m;
}
val lemma_to_from_mont_id_gen (n mont_R mont_R_inv:pos) (a:nat{a < n}) : Lemma
(requires mont_R * mont_R_inv % n = 1)
(ensures (a * mont_R % n) * mont_R_inv % n == a)
let lemma_to_from_mont_id_gen n mont_R mont_R_inv a =
lemma_mod_mul_assoc n a mont_R mont_R_inv;
Math.Lemmas.modulo_lemma a n
val lemma_from_to_mont_id_gen (n mont_R mont_R_inv:pos) (a:nat{a < n}) : Lemma
(requires mont_R_inv * mont_R % n = 1)
(ensures (a * mont_R_inv % n) * mont_R % n == a)
let lemma_from_to_mont_id_gen n mont_R mont_R_inv a =
lemma_to_from_mont_id_gen n mont_R_inv mont_R a
val mont_mul_lemma_gen (n:pos) (mont_R_inv a b: nat) :
Lemma (((a * mont_R_inv % n) * (b * mont_R_inv % n)) % n ==
((a * b * mont_R_inv) % n) * mont_R_inv % n)
let mont_mul_lemma_gen n mont_R_inv a b =
calc (==) {
((a * mont_R_inv % n) * (b * mont_R_inv % n)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l
(a * mont_R_inv) (b * mont_R_inv % n) n }
(a * mont_R_inv * (b * mont_R_inv % n)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (a * mont_R_inv) (b * mont_R_inv) n }
(a * mont_R_inv * (b * mont_R_inv)) % n;
(==) { Math.Lemmas.paren_mul_right a mont_R_inv (b * mont_R_inv) }
(a * (mont_R_inv * (b * mont_R_inv))) % n;
(==) { Math.Lemmas.paren_mul_right mont_R_inv b mont_R_inv }
(a * (mont_R_inv * b * mont_R_inv)) % n;
(==) { Math.Lemmas.swap_mul mont_R_inv b }
(a * (b * mont_R_inv * mont_R_inv)) % n;
(==) { Math.Lemmas.paren_mul_right a (b * mont_R_inv) mont_R_inv }
(a * (b * mont_R_inv) * mont_R_inv) % n;
(==) { Math.Lemmas.paren_mul_right a b mont_R_inv }
(a * b * mont_R_inv * mont_R_inv) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a * b * mont_R_inv) mont_R_inv n }
((a * b * mont_R_inv) % n) * mont_R_inv % n;
}
val mont_add_lemma_gen (n:pos) (mont_R_inv a b: nat) :
Lemma ((a * mont_R_inv % n + b * mont_R_inv % n) % n == (a + b) % n * mont_R_inv % n)
let mont_add_lemma_gen n mont_R_inv a b =
calc (==) {
(a * mont_R_inv % n + b * mont_R_inv % n) % n;
(==) { Math.Lemmas.modulo_distributivity (a * mont_R_inv) (b * mont_R_inv) n }
(a * mont_R_inv + b * mont_R_inv) % n;
(==) { Math.Lemmas.distributivity_add_left a b mont_R_inv }
(a + b) * mont_R_inv % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a + b) mont_R_inv n }
(a + b) % n * mont_R_inv % n;
}
val mont_sub_lemma_gen (n:pos) (mont_R_inv a b: nat) :
Lemma ((a * mont_R_inv % n - b * mont_R_inv % n) % n == (a - b) % n * mont_R_inv % n)
let mont_sub_lemma_gen n mont_R_inv a b =
calc (==) {
(a * mont_R_inv % n - b * mont_R_inv % n) % n;
(==) { Math.Lemmas.lemma_mod_sub_distr (a * mont_R_inv % n) (b * mont_R_inv) n }
(a * mont_R_inv % n - b * mont_R_inv) % n;
(==) { Math.Lemmas.lemma_mod_plus_distr_l (a * mont_R_inv) (- b * mont_R_inv) n }
(a * mont_R_inv - b * mont_R_inv) % n;
(==) { Math.Lemmas.distributivity_sub_left a b mont_R_inv }
(a - b) * mont_R_inv % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a - b) mont_R_inv n }
(a - b) % n * mont_R_inv % n;
}
val lemma_mont_inv_gen (n:pos{1 < n}) (mont_R:pos) (mont_R_inv:nat{mont_R_inv < n}) (a:nat{a < n}) :
Lemma
(requires M.pow_mod #n mont_R_inv (n - 2) == mont_R % n)
(ensures M.pow_mod #n (a * mont_R_inv % n) (n - 2) == M.pow_mod #n a (n - 2) * mont_R % n)
let lemma_mont_inv_gen n mont_R mont_R_inv k =
M.lemma_pow_mod #n (k * mont_R_inv % n) (n - 2);
// assert (M.pow_mod #n (k * mont_R_inv % n) (n - 2) ==
// M.pow (k * mont_R_inv % n) (n - 2) % n);
M.lemma_pow_mod_base (k * mont_R_inv) (n - 2) n;
// == M.pow (k * mont_R_inv) (n - 2) % n
M.lemma_pow_mul_base k mont_R_inv (n - 2);
// == M.pow k (n - 2) * M.pow mont_R_inv (n - 2) % n
Math.Lemmas.lemma_mod_mul_distr_r (M.pow k (n - 2)) (M.pow mont_R_inv (n - 2)) n;
// == M.pow k (n - 2) * (M.pow mont_R_inv (n - 2) % n) % n
M.lemma_pow_mod #n mont_R_inv (n - 2);
assert (M.pow_mod #n (k * mont_R_inv % n) (n - 2) == M.pow k (n - 2) * (mont_R % n) % n);
Math.Lemmas.lemma_mod_mul_distr_r (M.pow k (n - 2)) mont_R n;
// == M.pow k (n - 2) * mont_R % n
Math.Lemmas.lemma_mod_mul_distr_l (M.pow k (n - 2)) mont_R n;
// == M.pow k (n - 2) % n * mont_R % n
M.lemma_pow_mod #n k (n - 2)
let mont_cancel_lemma_gen n mont_R mont_R_inv a b =
calc (==) {
(a * mont_R % n * b * mont_R_inv) % n;
(==) { Math.Lemmas.paren_mul_right (a * mont_R % n) b mont_R_inv }
(a * mont_R % n * (b * mont_R_inv)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a * mont_R) (b * mont_R_inv) n }
(a * mont_R * (b * mont_R_inv)) % n;
(==) { Math.Lemmas.paren_mul_right a mont_R (b * mont_R_inv);
Math.Lemmas.swap_mul mont_R (b * mont_R_inv) }
(a * (b * mont_R_inv * mont_R)) % n;
(==) { Math.Lemmas.paren_mul_right b mont_R_inv mont_R }
(a * (b * (mont_R_inv * mont_R))) % n;
(==) { Math.Lemmas.paren_mul_right a b (mont_R_inv * mont_R) }
(a * b * (mont_R_inv * mont_R)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (a * b) (mont_R_inv * mont_R) n }
(a * b * (mont_R_inv * mont_R % n)) % n;
(==) { assert (mont_R_inv * mont_R % n = 1) }
(a * b) % n;
}
let fmont_R_inv =
let d, _ = SBML.eea_pow2_odd 256 S.prime in d % S.prime
let mul_fmont_R_and_R_inv_is_one () =
let d, k = SBML.eea_pow2_odd 256 S.prime in
SBML.mont_preconditions_d 64 4 S.prime;
assert (d * pow2 256 % S.prime = 1);
Math.Lemmas.lemma_mod_mul_distr_l d (pow2 256) S.prime
//--------------------------------------//
// bn_mont_reduction is x * fmont_R_inv //
//--------------------------------------//
val lemma_prime_mont: unit ->
Lemma (S.prime % 2 = 1 /\ S.prime < pow2 256 /\ (1 + S.prime) % pow2 64 = 0)
let lemma_prime_mont () =
assert_norm (S.prime % 2 = 1);
assert_norm (S.prime < pow2 256);
assert_norm ((1 + S.prime) % pow2 64 = 0)
let bn_mont_reduction_lemma x n =
lemma_prime_mont ();
assert (SBM.bn_mont_pre n (u64 1));
let d, _ = SBML.eea_pow2_odd 256 (BD.bn_v n) in
let res = SBM.bn_mont_reduction n (u64 1) x in
assert_norm (S.prime * S.prime < S.prime * pow2 256);
assert (BD.bn_v x < S.prime * pow2 256);
SBM.bn_mont_reduction_lemma n (u64 1) x;
assert (BD.bn_v res == SBML.mont_reduction 64 4 (BD.bn_v n) 1 (BD.bn_v x));
SBML.mont_reduction_lemma 64 4 (BD.bn_v n) 1 (BD.bn_v x);
assert (BD.bn_v res == BD.bn_v x * d % S.prime);
calc (==) {
BD.bn_v x * d % S.prime;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (BD.bn_v x) d S.prime }
BD.bn_v x * (d % S.prime) % S.prime;
(==) { }
BD.bn_v x * fmont_R_inv % S.prime;
}
//---------------------------
let lemma_from_mont_zero a =
Spec.P256.Lemmas.prime_lemma ();
Lib.NatMod.lemma_mul_mod_prime_zero #S.prime a fmont_R_inv
let lemma_to_from_mont_id a =
mul_fmont_R_and_R_inv_is_one ();
lemma_to_from_mont_id_gen S.prime fmont_R fmont_R_inv a
let lemma_from_to_mont_id a =
mul_fmont_R_and_R_inv_is_one ();
lemma_from_to_mont_id_gen S.prime fmont_R fmont_R_inv a
let fmont_mul_lemma a b =
mont_mul_lemma_gen S.prime fmont_R_inv a b
let fmont_add_lemma a b =
mont_add_lemma_gen S.prime fmont_R_inv a b
let fmont_sub_lemma a b =
mont_sub_lemma_gen S.prime fmont_R_inv a b
/// Montgomery arithmetic for a scalar field
let qmont_R_inv =
let d, _ = SBML.eea_pow2_odd 256 S.order in d % S.order
let mul_qmont_R_and_R_inv_is_one () =
let d, k = SBML.eea_pow2_odd 256 S.order in
SBML.mont_preconditions_d 64 4 S.order;
assert (d * pow2 256 % S.order = 1);
Math.Lemmas.lemma_mod_mul_distr_l d (pow2 256) S.order;
assert (d % S.order * pow2 256 % S.order = 1)
//--------------------------------------//
// bn_mont_reduction is x * qmont_R_inv //
//--------------------------------------//
val lemma_order_mont: unit ->
Lemma (S.order % 2 = 1 /\ S.order < pow2 256 /\ (1 + S.order * 0xccd1c8aaee00bc4f) % pow2 64 = 0)
let lemma_order_mont () =
assert_norm (S.order % 2 = 1);
assert_norm (S.order < pow2 256);
assert_norm ((1 + S.order * 0xccd1c8aaee00bc4f) % pow2 64 = 0)
let bn_qmont_reduction_lemma x n =
let k0 = 0xccd1c8aaee00bc4f in
lemma_order_mont ();
assert (SBM.bn_mont_pre n (u64 k0));
let d, _ = SBML.eea_pow2_odd 256 (BD.bn_v n) in
let res = SBM.bn_mont_reduction n (u64 k0) x in
assert_norm (S.order * S.order < S.order * pow2 256);
assert (BD.bn_v x < S.order * pow2 256);
SBM.bn_mont_reduction_lemma n (u64 k0) x;
assert (BD.bn_v res == SBML.mont_reduction 64 4 (BD.bn_v n) k0 (BD.bn_v x));
SBML.mont_reduction_lemma 64 4 (BD.bn_v n) k0 (BD.bn_v x);
assert (BD.bn_v res == BD.bn_v x * d % S.order);
calc (==) {
(BD.bn_v x) * d % S.order;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (BD.bn_v x) d S.order }
(BD.bn_v x) * (d % S.order) % S.order;
(==) { }
(BD.bn_v x) * qmont_R_inv % S.order;
}
//--------------------------
let lemma_to_from_qmont_id a =
mul_qmont_R_and_R_inv_is_one ();
lemma_to_from_mont_id_gen S.order qmont_R qmont_R_inv a
let lemma_from_to_qmont_id a =
mul_qmont_R_and_R_inv_is_one ();
Math.Lemmas.swap_mul qmont_R qmont_R_inv;
lemma_from_to_mont_id_gen S.order qmont_R qmont_R_inv a
let qmont_add_lemma a b =
mont_add_lemma_gen S.order qmont_R_inv a b
let qmont_mul_lemma a b =
mont_mul_lemma_gen S.order qmont_R_inv a b
let qmont_inv_lemma k =
assert_norm (M.pow_mod_ #S.order qmont_R_inv (S.order - 2) == qmont_R % S.order);
M.pow_mod_def #S.order qmont_R_inv (S.order - 2);
assert (M.pow_mod #S.order qmont_R_inv (S.order - 2) == qmont_R % S.order);
lemma_mont_inv_gen S.order qmont_R qmont_R_inv k;
assert (M.pow_mod #S.order (k * qmont_R_inv % S.order) (S.order - 2) ==
M.pow_mod #S.order k (S.order - 2) * qmont_R % S.order);
assert (S.qinv (k * qmont_R_inv % S.order) == S.qinv k * qmont_R % S.order)
val qmont_cancel_lemma1: a:S.qelem -> b:S.qelem ->
Lemma ((a * qmont_R % S.order * b * qmont_R_inv) % S.order = a * b % S.order)
let qmont_cancel_lemma1 a b =
mul_qmont_R_and_R_inv_is_one ();
mont_cancel_lemma_gen S.order qmont_R qmont_R_inv a b
val qmont_cancel_lemma2: a:S.qelem -> b:S.qelem ->
Lemma (to_qmont a * from_qmont b % S.order = a * b % S.order) | {
"checked_file": "/",
"dependencies": [
"Spec.P256.Lemmas.fsti.checked",
"Spec.P256.fst.checked",
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.P256.Montgomery.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "SBML"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SBM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.P256",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SBM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.P256",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec.P256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec.P256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Spec.P256.PointOps.qelem -> b: Spec.P256.PointOps.qelem
-> FStar.Pervasives.Lemma
(ensures
Hacl.Spec.P256.Montgomery.to_qmont a * Hacl.Spec.P256.Montgomery.from_qmont b %
Spec.P256.PointOps.order =
a * b % Spec.P256.PointOps.order) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Spec.P256.PointOps.qelem",
"FStar.Calc.calc_finish",
"Prims.int",
"Prims.eq2",
"Prims.op_Modulus",
"FStar.Mul.op_Star",
"Hacl.Spec.P256.Montgomery.to_qmont",
"Hacl.Spec.P256.Montgomery.from_qmont",
"Spec.P256.PointOps.order",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"Hacl.Spec.P256.Montgomery.qmont_R",
"Hacl.Spec.P256.Montgomery.qmont_R_inv",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"FStar.Math.Lemmas.lemma_mod_mul_distr_r",
"FStar.Math.Lemmas.paren_mul_right",
"Hacl.Spec.P256.Montgomery.qmont_cancel_lemma1"
] | [] | false | false | true | false | false | let qmont_cancel_lemma2 a b =
| calc ( == ) {
to_qmont a * from_qmont b % S.order;
( == ) { () }
((a * qmont_R % S.order) * (b * qmont_R_inv % S.order)) % S.order;
( == ) { Math.Lemmas.lemma_mod_mul_distr_r (a * qmont_R % S.order) (b * qmont_R_inv) S.order }
((a * qmont_R % S.order) * (b * qmont_R_inv)) % S.order;
( == ) { Math.Lemmas.paren_mul_right (a * qmont_R % S.order) b qmont_R_inv }
(((a * qmont_R % S.order) * b) * qmont_R_inv) % S.order;
( == ) { qmont_cancel_lemma1 a b }
a * b % S.order;
} | false |
FStar.Math.Euclid.fst | FStar.Math.Euclid.euclid | val euclid (n:pos) (a b r s:int) : Lemma
(requires (a * b) % n = 0 /\ r * n + s * a = 1)
(ensures b % n = 0) | val euclid (n:pos) (a b r s:int) : Lemma
(requires (a * b) % n = 0 /\ r * n + s * a = 1)
(ensures b % n = 0) | let euclid n a b r s =
let open FStar.Math.Lemmas in
calc (==) {
b % n;
== { distributivity_add_left (r * n) (s * a) b }
(r * n * b + s * a * b) % n;
== { paren_mul_right s a b }
(r * n * b + s * (a * b)) % n;
== { modulo_distributivity (r * n * b) (s * (a * b)) n }
((r * n * b) % n + s * (a * b) % n) % n;
== { lemma_mod_mul_distr_r s (a * b) n }
((r * n * b) % n + s * ((a * b) % n) % n) % n;
== { assert (a * b % n = 0) }
((r * n * b) % n + s * 0 % n) % n;
== { assert (s * 0 == 0) }
((r * n * b) % n + 0 % n) % n;
== { modulo_lemma 0 n }
((r * n * b) % n) % n;
== { lemma_mod_twice (r * n * b) n }
(r * n * b) % n;
== { _ by (FStar.Tactics.Canon.canon ()) }
(n * (r * b)) % n;
== { lemma_mod_mul_distr_l n (r * b) n}
n % n * (r * b) % n;
== { assert (n % n = 0) }
(0 * (r * b)) % n;
== { assert (0 * (r * b) == 0) }
0 % n;
== { small_mod 0 n }
0;
} | {
"file_name": "ulib/FStar.Math.Euclid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 3,
"end_line": 250,
"start_col": 0,
"start_line": 220
} | module FStar.Math.Euclid
open FStar.Mul
open FStar.Math.Lemmas
///
/// Auxiliary lemmas
///
val eq_mult_left (a b:int) : Lemma (requires a = b * a) (ensures a = 0 \/ b = 1)
let eq_mult_left a b = ()
val eq_mult_one (a b:int) : Lemma
(requires a * b = 1)
(ensures (a = 1 /\ b = 1) \/ (a = -1 /\ b = -1))
let eq_mult_one a b = ()
val opp_idempotent (a:int) : Lemma (-(-a) == a)
let opp_idempotent a = ()
val add_sub_l (a b:int) : Lemma (a - b + b = a)
let add_sub_l a b = ()
val add_sub_r (a b:int) : Lemma (a + b - b = a)
let add_sub_r a b = ()
///
/// Divides relation
///
let divides_reflexive a =
Classical.exists_intro (fun q -> a = q * a) 1
let divides_transitive a b c =
eliminate exists q1. b == q1 * a
returns a `divides` c
with _pf.
eliminate exists q2. c == q2 * b
returns _
with _pf2.
introduce exists q. c == q * a
with (q1 * q2)
and ()
let divide_antisym a b =
if a <> 0 then
Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q1. b = q1 * a))
(fun q1 ->
Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q2. a = q2 * b))
(fun q2 ->
assert (b = q1 * a);
assert (a = q2 * b);
assert (b = q1 * (q2 * b));
paren_mul_right q1 q2 b;
eq_mult_left b (q1 * q2);
eq_mult_one q1 q2))
let divides_0 a =
Classical.exists_intro (fun q -> 0 = q * a) 0
let divides_1 a = ()
let divides_minus a b =
Classical.exists_elim (a `divides` (-b))
(Squash.get_proof (a `divides` b))
(fun q -> Classical.exists_intro (fun q' -> -b = q' * a) (-q))
let divides_opp a b =
Classical.exists_elim ((-a) `divides` b)
(Squash.get_proof (a `divides` b))
(fun q -> Classical.exists_intro (fun q' -> b = q' * (-a)) (-q))
let divides_plus a b d =
Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q1. a = q1 * d))
(fun q1 ->
Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q2. b = q2 * d))
(fun q2 ->
assert (a + b = q1 * d + q2 * d);
distributivity_add_left q1 q2 d;
Classical.exists_intro (fun q -> a + b = q * d) (q1 + q2)))
let divides_sub a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
divides_plus a (-b) d
let divides_mult_right a b d =
Classical.exists_elim (d `divides` (a * b)) (Squash.get_proof (d `divides` b))
(fun q ->
paren_mul_right a q d;
Classical.exists_intro (fun r -> a * b = r * d) (a * q))
///
/// GCD
///
let mod_divides a b =
Classical.exists_intro (fun q -> a = q * b) (a / b)
let divides_mod a b =
Classical.exists_elim (a % b = 0) (Squash.get_proof (b `divides` a))
(fun q -> cancel_mul_div q b)
let is_gcd_unique a b c d =
divide_antisym c d
let is_gcd_reflexive a = ()
let is_gcd_symmetric a b d = ()
let is_gcd_0 a = ()
let is_gcd_1 a = ()
let is_gcd_minus a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
opp_idempotent b
let is_gcd_opp a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
divides_opp d a;
divides_opp d b
let is_gcd_plus a b q d =
add_sub_r b (q * a);
Classical.forall_intro_3 (Classical.move_requires_3 divides_plus);
Classical.forall_intro_3 (Classical.move_requires_3 divides_mult_right);
Classical.forall_intro_3 (Classical.move_requires_3 divides_sub)
///
/// Extended Euclidean algorithm
///
val is_gcd_for_euclid (a b q d:int) : Lemma
(requires is_gcd b (a - q * b) d)
(ensures is_gcd a b d)
let is_gcd_for_euclid a b q d =
add_sub_l a (q * b);
is_gcd_plus b (a - q * b) q d
val egcd (a b u1 u2 u3 v1 v2 v3:int) : Pure (int & int & int)
(requires v3 >= 0 /\
u1 * a + u2 * b = u3 /\
v1 * a + v2 * b = v3 /\
(forall d. is_gcd u3 v3 d ==> is_gcd a b d))
(ensures (fun (u, v, d) -> u * a + v * b = d /\ is_gcd a b d))
(decreases v3)
let rec egcd a b u1 u2 u3 v1 v2 v3 =
if v3 = 0 then
begin
divides_0 u3;
(u1, u2, u3)
end
else
begin
let q = u3 / v3 in
euclidean_division_definition u3 v3;
assert (u3 - q * v3 = (q * v3 + u3 % v3) - q * v3);
assert (q * v3 - q * v3 = 0);
swap_add_plus_minus (q * v3) (u3 % v3) (q * v3);
calc (==) {
(u1 - q * v1) * a + (u2 - q * v2) * b;
== { _ by (FStar.Tactics.Canon.canon()) }
(u1 * a + u2 * b) - q * (v1 * a + v2 * b);
== { }
u3 - q * v3;
== { lemma_div_mod u3 v3 }
u3 % v3;
};
let u1, v1 = v1, u1 - q * v1 in
let u2, v2 = v2, u2 - q * v2 in
let u3' = u3 in
let v3' = v3 in
let u3, v3 = v3, u3 - q * v3 in
(* proving the implication in the precondition *)
introduce forall d. is_gcd v3' (u3' - q * v3') d ==> is_gcd u3' v3' d with
introduce _ ==> _ with _.
is_gcd_for_euclid u3' v3' q d;
let r = egcd a b u1 u2 u3 v1 v2 v3 in
r
end
let euclid_gcd a b =
if b >= 0 then
egcd a b 1 0 a 0 1 b
else (
introduce forall d. is_gcd a (-b) d ==> is_gcd a b d
with introduce _ ==> _
with _pf.
(is_gcd_minus a b d;
is_gcd_symmetric b a d);
let res = egcd a b 1 0 a 0 (-1) (-b) in
let _, _, d = res in
assert (is_gcd a b d);
res
)
val is_gcd_prime_aux (p:int) (a:pos{a < p}) (d:int) : Lemma
(requires is_prime p /\ d `divides` p /\ d `divides` a)
(ensures d = 1 \/ d = -1)
let is_gcd_prime_aux p a d = ()
val is_gcd_prime (p:int{is_prime p}) (a:pos{a < p}) : Lemma (is_gcd p a 1)
let is_gcd_prime p a =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
Classical.forall_intro (Classical.move_requires (is_gcd_prime_aux p a));
assert (forall x. x `divides` p /\ x `divides` a ==> x = 1 \/ x = -1 /\ x `divides` 1)
let bezout_prime p a =
let r, s, d = euclid_gcd p a in
assert (r * p + s * a = d);
assert (is_gcd p a d);
is_gcd_prime p a;
is_gcd_unique p a 1 d;
assert (d = 1 \/ d = -1);
assert ((-r) * p + (-s) * a == -(r * p + s * a)) by (FStar.Tactics.Canon.canon());
if d = 1 then r, s else -r, -s | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.Canon.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.Sugar.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "FStar.Math.Euclid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | n: Prims.pos -> a: Prims.int -> b: Prims.int -> r: Prims.int -> s: Prims.int
-> FStar.Pervasives.Lemma (requires a * b % n = 0 /\ r * n + s * a = 1) (ensures b % n = 0) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.pos",
"Prims.int",
"FStar.Calc.calc_finish",
"Prims.eq2",
"Prims.op_Modulus",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"FStar.Mul.op_Star",
"Prims.op_Addition",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"FStar.Math.Lemmas.distributivity_add_left",
"Prims.squash",
"FStar.Math.Lemmas.paren_mul_right",
"FStar.Math.Lemmas.modulo_distributivity",
"FStar.Math.Lemmas.lemma_mod_mul_distr_r",
"Prims._assert",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Math.Lemmas.modulo_lemma",
"FStar.Math.Lemmas.lemma_mod_twice",
"FStar.Math.Lemmas.lemma_mod_mul_distr_l",
"FStar.Math.Lemmas.small_mod"
] | [] | false | false | true | false | false | let euclid n a b r s =
| let open FStar.Math.Lemmas in
calc ( == ) {
b % n;
( == ) { distributivity_add_left (r * n) (s * a) b }
((r * n) * b + (s * a) * b) % n;
( == ) { paren_mul_right s a b }
((r * n) * b + s * (a * b)) % n;
( == ) { modulo_distributivity ((r * n) * b) (s * (a * b)) n }
(((r * n) * b) % n + s * (a * b) % n) % n;
( == ) { lemma_mod_mul_distr_r s (a * b) n }
(((r * n) * b) % n + s * ((a * b) % n) % n) % n;
( == ) { assert (a * b % n = 0) }
(((r * n) * b) % n + s * 0 % n) % n;
( == ) { assert (s * 0 == 0) }
(((r * n) * b) % n + 0 % n) % n;
( == ) { modulo_lemma 0 n }
(((r * n) * b) % n) % n;
( == ) { lemma_mod_twice ((r * n) * b) n }
((r * n) * b) % n;
( == ) { FStar.Tactics.Effect.synth_by_tactic (fun _ -> (FStar.Tactics.Canon.canon ())) }
(n * (r * b)) % n;
( == ) { lemma_mod_mul_distr_l n (r * b) n }
(n % n) * (r * b) % n;
( == ) { assert (n % n = 0) }
(0 * (r * b)) % n;
( == ) { assert (0 * (r * b) == 0) }
0 % n;
( == ) { small_mod 0 n }
0;
} | false |
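(* Editor's illustration for the `euclid` lemma completed above (added commentary,
   not a field of the dataset record): with n = 7, a = 3, b = 14, r = 1, s = -2 the
   hypotheses hold, since 3 * 14 % 7 = 42 % 7 = 0 and 1 * 7 + (-2) * 3 = 1, and the
   conclusion b % n = 14 % 7 = 0 holds as well. *)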
FStar.Math.Euclid.fst | FStar.Math.Euclid.euclid_gcd | val euclid_gcd (a b:int) : Pure (int & int & int)
(requires True)
(ensures fun (r, s, d) -> r * a + s * b = d /\ is_gcd a b d) | val euclid_gcd (a b:int) : Pure (int & int & int)
(requires True)
(ensures fun (r, s, d) -> r * a + s * b = d /\ is_gcd a b d) | let euclid_gcd a b =
if b >= 0 then
egcd a b 1 0 a 0 1 b
else (
introduce forall d. is_gcd a (-b) d ==> is_gcd a b d
with introduce _ ==> _
with _pf.
(is_gcd_minus a b d;
is_gcd_symmetric b a d);
let res = egcd a b 1 0 a 0 (-1) (-b) in
let _, _, d = res in
assert (is_gcd a b d);
res
) | {
"file_name": "ulib/FStar.Math.Euclid.fst",
"git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | {
"end_col": 3,
"end_line": 197,
"start_col": 0,
"start_line": 184
} | module FStar.Math.Euclid
open FStar.Mul
open FStar.Math.Lemmas
///
/// Auxiliary lemmas
///
val eq_mult_left (a b:int) : Lemma (requires a = b * a) (ensures a = 0 \/ b = 1)
let eq_mult_left a b = ()
val eq_mult_one (a b:int) : Lemma
(requires a * b = 1)
(ensures (a = 1 /\ b = 1) \/ (a = -1 /\ b = -1))
let eq_mult_one a b = ()
val opp_idempotent (a:int) : Lemma (-(-a) == a)
let opp_idempotent a = ()
val add_sub_l (a b:int) : Lemma (a - b + b = a)
let add_sub_l a b = ()
val add_sub_r (a b:int) : Lemma (a + b - b = a)
let add_sub_r a b = ()
///
/// Divides relation
///
let divides_reflexive a =
Classical.exists_intro (fun q -> a = q * a) 1
let divides_transitive a b c =
eliminate exists q1. b == q1 * a
returns a `divides` c
with _pf.
eliminate exists q2. c == q2 * b
returns _
with _pf2.
introduce exists q. c == q * a
with (q1 * q2)
and ()
let divide_antisym a b =
if a <> 0 then
Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q1. b = q1 * a))
(fun q1 ->
Classical.exists_elim (a = b \/ a = -b) (Squash.get_proof (exists q2. a = q2 * b))
(fun q2 ->
assert (b = q1 * a);
assert (a = q2 * b);
assert (b = q1 * (q2 * b));
paren_mul_right q1 q2 b;
eq_mult_left b (q1 * q2);
eq_mult_one q1 q2))
let divides_0 a =
Classical.exists_intro (fun q -> 0 = q * a) 0
let divides_1 a = ()
let divides_minus a b =
Classical.exists_elim (a `divides` (-b))
(Squash.get_proof (a `divides` b))
(fun q -> Classical.exists_intro (fun q' -> -b = q' * a) (-q))
let divides_opp a b =
Classical.exists_elim ((-a) `divides` b)
(Squash.get_proof (a `divides` b))
(fun q -> Classical.exists_intro (fun q' -> b = q' * (-a)) (-q))
let divides_plus a b d =
Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q1. a = q1 * d))
(fun q1 ->
Classical.exists_elim (d `divides` (a + b)) (Squash.get_proof (exists q2. b = q2 * d))
(fun q2 ->
assert (a + b = q1 * d + q2 * d);
distributivity_add_left q1 q2 d;
Classical.exists_intro (fun q -> a + b = q * d) (q1 + q2)))
let divides_sub a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
divides_plus a (-b) d
let divides_mult_right a b d =
Classical.exists_elim (d `divides` (a * b)) (Squash.get_proof (d `divides` b))
(fun q ->
paren_mul_right a q d;
Classical.exists_intro (fun r -> a * b = r * d) (a * q))
///
/// GCD
///
let mod_divides a b =
Classical.exists_intro (fun q -> a = q * b) (a / b)
let divides_mod a b =
Classical.exists_elim (a % b = 0) (Squash.get_proof (b `divides` a))
(fun q -> cancel_mul_div q b)
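(* Editor's note (illustrative addition; the name is the editor's): `mod_divides`
   and `divides_mod` relate the two views of divisibility.  Concretely, 84 % 7 = 0
   and the corresponding witness is 84 = 12 * 7: *)
let editor_example_mod_divides () : unit =
  assert_norm (84 % 7 = 0 && 84 = 12 * 7)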
let is_gcd_unique a b c d =
divide_antisym c d
let is_gcd_reflexive a = ()
let is_gcd_symmetric a b d = ()
let is_gcd_0 a = ()
let is_gcd_1 a = ()
let is_gcd_minus a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
opp_idempotent b
let is_gcd_opp a b d =
Classical.forall_intro_2 (Classical.move_requires_2 divides_minus);
divides_opp d a;
divides_opp d b
let is_gcd_plus a b q d =
add_sub_r b (q * a);
Classical.forall_intro_3 (Classical.move_requires_3 divides_plus);
Classical.forall_intro_3 (Classical.move_requires_3 divides_mult_right);
Classical.forall_intro_3 (Classical.move_requires_3 divides_sub)
///
/// Extended Euclidean algorithm
///
val is_gcd_for_euclid (a b q d:int) : Lemma
(requires is_gcd b (a - q * b) d)
(ensures is_gcd a b d)
let is_gcd_for_euclid a b q d =
add_sub_l a (q * b);
is_gcd_plus b (a - q * b) q d
val egcd (a b u1 u2 u3 v1 v2 v3:int) : Pure (int & int & int)
(requires v3 >= 0 /\
u1 * a + u2 * b = u3 /\
v1 * a + v2 * b = v3 /\
(forall d. is_gcd u3 v3 d ==> is_gcd a b d))
(ensures (fun (u, v, d) -> u * a + v * b = d /\ is_gcd a b d))
(decreases v3)
let rec egcd a b u1 u2 u3 v1 v2 v3 =
if v3 = 0 then
begin
divides_0 u3;
(u1, u2, u3)
end
else
begin
let q = u3 / v3 in
euclidean_division_definition u3 v3;
assert (u3 - q * v3 = (q * v3 + u3 % v3) - q * v3);
assert (q * v3 - q * v3 = 0);
swap_add_plus_minus (q * v3) (u3 % v3) (q * v3);
calc (==) {
(u1 - q * v1) * a + (u2 - q * v2) * b;
== { _ by (FStar.Tactics.Canon.canon()) }
(u1 * a + u2 * b) - q * (v1 * a + v2 * b);
== { }
u3 - q * v3;
== { lemma_div_mod u3 v3 }
u3 % v3;
};
let u1, v1 = v1, u1 - q * v1 in
let u2, v2 = v2, u2 - q * v2 in
let u3' = u3 in
let v3' = v3 in
let u3, v3 = v3, u3 - q * v3 in
(* proving the implication in the precondition *)
introduce forall d. is_gcd v3' (u3' - q * v3') d ==> is_gcd u3' v3' d with
introduce _ ==> _ with _.
is_gcd_for_euclid u3' v3' q d;
let r = egcd a b u1 u2 u3 v1 v2 v3 in
r
end | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.Effect.fsti.checked",
"FStar.Tactics.Canon.fst.checked",
"FStar.Squash.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Classical.Sugar.fsti.checked",
"FStar.Classical.fsti.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "FStar.Math.Euclid.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.Math.Lemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: Prims.int -> b: Prims.int -> Prims.Pure ((Prims.int * Prims.int) * Prims.int) | Prims.Pure | [] | [] | [
"Prims.int",
"Prims.op_GreaterThanOrEqual",
"FStar.Math.Euclid.egcd",
"Prims.bool",
"Prims.unit",
"Prims._assert",
"FStar.Math.Euclid.is_gcd",
"FStar.Pervasives.Native.tuple3",
"Prims.op_Minus",
"FStar.Classical.Sugar.forall_intro",
"Prims.l_imp",
"FStar.Classical.Sugar.implies_intro",
"Prims.squash",
"FStar.Math.Euclid.is_gcd_symmetric",
"FStar.Math.Euclid.is_gcd_minus"
] | [] | false | false | false | false | false | let euclid_gcd a b =
| if b >= 0
then egcd a b 1 0 a 0 1 b
else
(introduce forall d . is_gcd a (- b) d ==> is_gcd a b d
with introduce _ ==> _
with _pf. (is_gcd_minus a b d;
is_gcd_symmetric b a d);
let res = egcd a b 1 0 a 0 (- 1) (- b) in
let _, _, d = res in
assert (is_gcd a b d);
res) | false |
Hacl.Spec.P256.Montgomery.fst | Hacl.Spec.P256.Montgomery.bn_mont_reduction_lemma | val bn_mont_reduction_lemma: x:LSeq.lseq uint64 8 -> n:LSeq.lseq uint64 4 -> Lemma
(requires BD.bn_v n = S.prime /\ BD.bn_v x < S.prime * S.prime)
(ensures BD.bn_v (SBM.bn_mont_reduction n (u64 1) x) == BD.bn_v x * fmont_R_inv % S.prime) | val bn_mont_reduction_lemma: x:LSeq.lseq uint64 8 -> n:LSeq.lseq uint64 4 -> Lemma
(requires BD.bn_v n = S.prime /\ BD.bn_v x < S.prime * S.prime)
(ensures BD.bn_v (SBM.bn_mont_reduction n (u64 1) x) == BD.bn_v x * fmont_R_inv % S.prime) | let bn_mont_reduction_lemma x n =
lemma_prime_mont ();
assert (SBM.bn_mont_pre n (u64 1));
let d, _ = SBML.eea_pow2_odd 256 (BD.bn_v n) in
let res = SBM.bn_mont_reduction n (u64 1) x in
assert_norm (S.prime * S.prime < S.prime * pow2 256);
assert (BD.bn_v x < S.prime * pow2 256);
SBM.bn_mont_reduction_lemma n (u64 1) x;
assert (BD.bn_v res == SBML.mont_reduction 64 4 (BD.bn_v n) 1 (BD.bn_v x));
SBML.mont_reduction_lemma 64 4 (BD.bn_v n) 1 (BD.bn_v x);
assert (BD.bn_v res == BD.bn_v x * d % S.prime);
calc (==) {
BD.bn_v x * d % S.prime;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (BD.bn_v x) d S.prime }
BD.bn_v x * (d % S.prime) % S.prime;
(==) { }
BD.bn_v x * fmont_R_inv % S.prime;
} | {
"file_name": "code/ecdsap256/Hacl.Spec.P256.Montgomery.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 3,
"end_line": 202,
"start_col": 0,
"start_line": 183
} | module Hacl.Spec.P256.Montgomery
open FStar.Mul
open Lib.IntTypes
module S = Spec.P256
module M = Lib.NatMod
module BD = Hacl.Spec.Bignum.Definitions
module SBM = Hacl.Spec.Bignum.Montgomery
module SBML = Hacl.Spec.Montgomery.Lemmas
#set-options "--z3rlimit 50 --fuel 0 --ifuel 0"
/// Montgomery arithmetic for a base field
val lemma_abc_is_acb (a b c:nat) : Lemma (a * b * c = a * c * b)
let lemma_abc_is_acb a b c =
Math.Lemmas.paren_mul_right a b c;
Math.Lemmas.swap_mul b c;
Math.Lemmas.paren_mul_right a c b
val lemma_mod_mul_assoc (n:pos) (a b c:nat) : Lemma ((a * b % n) * c % n == (a * (b * c % n)) % n)
let lemma_mod_mul_assoc m a b c =
calc (==) {
(a * b % m) * c % m;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a * b) c m }
(a * b) * c % m;
(==) { Math.Lemmas.paren_mul_right a b c }
a * (b * c) % m;
(==) { Math.Lemmas.lemma_mod_mul_distr_r a (b * c) m }
a * (b * c % m) % m;
}
val lemma_to_from_mont_id_gen (n mont_R mont_R_inv:pos) (a:nat{a < n}) : Lemma
(requires mont_R * mont_R_inv % n = 1)
(ensures (a * mont_R % n) * mont_R_inv % n == a)
let lemma_to_from_mont_id_gen n mont_R mont_R_inv a =
lemma_mod_mul_assoc n a mont_R mont_R_inv;
Math.Lemmas.modulo_lemma a n
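(* Editor's illustration (added; the concrete numbers are only an example, not
   constants used by this development): with n = 13, mont_R = 16 and mont_R_inv = 9
   the hypothesis 16 * 9 % 13 = 1 holds, and the to/from-Montgomery round trip of
   a = 5 indeed returns 5, as the lemma above states: *)
let editor_example_to_from_mont () : unit =
  assert_norm (16 * 9 % 13 = 1 && (5 * 16 % 13) * 9 % 13 = 5)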
val lemma_from_to_mont_id_gen (n mont_R mont_R_inv:pos) (a:nat{a < n}) : Lemma
(requires mont_R_inv * mont_R % n = 1)
(ensures (a * mont_R_inv % n) * mont_R % n == a)
let lemma_from_to_mont_id_gen n mont_R mont_R_inv a =
lemma_to_from_mont_id_gen n mont_R_inv mont_R a
val mont_mul_lemma_gen (n:pos) (mont_R_inv a b: nat) :
Lemma (((a * mont_R_inv % n) * (b * mont_R_inv % n)) % n ==
((a * b * mont_R_inv) % n) * mont_R_inv % n)
let mont_mul_lemma_gen n mont_R_inv a b =
calc (==) {
((a * mont_R_inv % n) * (b * mont_R_inv % n)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l
(a * mont_R_inv) (b * mont_R_inv % n) n }
(a * mont_R_inv * (b * mont_R_inv % n)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (a * mont_R_inv) (b * mont_R_inv) n }
(a * mont_R_inv * (b * mont_R_inv)) % n;
(==) { Math.Lemmas.paren_mul_right a mont_R_inv (b * mont_R_inv) }
(a * (mont_R_inv * (b * mont_R_inv))) % n;
(==) { Math.Lemmas.paren_mul_right mont_R_inv b mont_R_inv }
(a * (mont_R_inv * b * mont_R_inv)) % n;
(==) { Math.Lemmas.swap_mul mont_R_inv b }
(a * (b * mont_R_inv * mont_R_inv)) % n;
(==) { Math.Lemmas.paren_mul_right a (b * mont_R_inv) mont_R_inv }
(a * (b * mont_R_inv) * mont_R_inv) % n;
(==) { Math.Lemmas.paren_mul_right a b mont_R_inv }
(a * b * mont_R_inv * mont_R_inv) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a * b * mont_R_inv) mont_R_inv n }
((a * b * mont_R_inv) % n) * mont_R_inv % n;
}
val mont_add_lemma_gen (n:pos) (mont_R_inv a b: nat) :
Lemma ((a * mont_R_inv % n + b * mont_R_inv % n) % n == (a + b) % n * mont_R_inv % n)
let mont_add_lemma_gen n mont_R_inv a b =
calc (==) {
(a * mont_R_inv % n + b * mont_R_inv % n) % n;
(==) { Math.Lemmas.modulo_distributivity (a * mont_R_inv) (b * mont_R_inv) n }
(a * mont_R_inv + b * mont_R_inv) % n;
(==) { Math.Lemmas.distributivity_add_left a b mont_R_inv }
(a + b) * mont_R_inv % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a + b) mont_R_inv n }
(a + b) % n * mont_R_inv % n;
}
val mont_sub_lemma_gen (n:pos) (mont_R_inv a b: nat) :
Lemma ((a * mont_R_inv % n - b * mont_R_inv % n) % n == (a - b) % n * mont_R_inv % n)
let mont_sub_lemma_gen n mont_R_inv a b =
calc (==) {
(a * mont_R_inv % n - b * mont_R_inv % n) % n;
(==) { Math.Lemmas.lemma_mod_sub_distr (a * mont_R_inv % n) (b * mont_R_inv) n }
(a * mont_R_inv % n - b * mont_R_inv) % n;
(==) { Math.Lemmas.lemma_mod_plus_distr_l (a * mont_R_inv) (- b * mont_R_inv) n }
(a * mont_R_inv - b * mont_R_inv) % n;
(==) { Math.Lemmas.distributivity_sub_left a b mont_R_inv }
(a - b) * mont_R_inv % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a - b) mont_R_inv n }
(a - b) % n * mont_R_inv % n;
}
val lemma_mont_inv_gen (n:pos{1 < n}) (mont_R:pos) (mont_R_inv:nat{mont_R_inv < n}) (a:nat{a < n}) :
Lemma
(requires M.pow_mod #n mont_R_inv (n - 2) == mont_R % n)
(ensures M.pow_mod #n (a * mont_R_inv % n) (n - 2) == M.pow_mod #n a (n - 2) * mont_R % n)
let lemma_mont_inv_gen n mont_R mont_R_inv k =
M.lemma_pow_mod #n (k * mont_R_inv % n) (n - 2);
// assert (M.pow_mod #n (k * mont_R_inv % n) (n - 2) ==
// M.pow (k * mont_R_inv % n) (n - 2) % n);
M.lemma_pow_mod_base (k * mont_R_inv) (n - 2) n;
// == M.pow (k * mont_R_inv) (n - 2) % n
M.lemma_pow_mul_base k mont_R_inv (n - 2);
// == M.pow k (n - 2) * M.pow mont_R_inv (n - 2) % n
Math.Lemmas.lemma_mod_mul_distr_r (M.pow k (n - 2)) (M.pow mont_R_inv (n - 2)) n;
// == M.pow k (n - 2) * (M.pow mont_R_inv (n - 2) % n) % n
M.lemma_pow_mod #n mont_R_inv (n - 2);
assert (M.pow_mod #n (k * mont_R_inv % n) (n - 2) == M.pow k (n - 2) * (mont_R % n) % n);
Math.Lemmas.lemma_mod_mul_distr_r (M.pow k (n - 2)) mont_R n;
// == M.pow k (n - 2) * mont_R % n
Math.Lemmas.lemma_mod_mul_distr_l (M.pow k (n - 2)) mont_R n;
// == M.pow k (n - 2) % n * mont_R % n
M.lemma_pow_mod #n k (n - 2)
let mont_cancel_lemma_gen n mont_R mont_R_inv a b =
calc (==) {
(a * mont_R % n * b * mont_R_inv) % n;
(==) { Math.Lemmas.paren_mul_right (a * mont_R % n) b mont_R_inv }
(a * mont_R % n * (b * mont_R_inv)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_l (a * mont_R) (b * mont_R_inv) n }
(a * mont_R * (b * mont_R_inv)) % n;
(==) { Math.Lemmas.paren_mul_right a mont_R (b * mont_R_inv);
Math.Lemmas.swap_mul mont_R (b * mont_R_inv) }
(a * (b * mont_R_inv * mont_R)) % n;
(==) { Math.Lemmas.paren_mul_right b mont_R_inv mont_R }
(a * (b * (mont_R_inv * mont_R))) % n;
(==) { Math.Lemmas.paren_mul_right a b (mont_R_inv * mont_R) }
(a * b * (mont_R_inv * mont_R)) % n;
(==) { Math.Lemmas.lemma_mod_mul_distr_r (a * b) (mont_R_inv * mont_R) n }
(a * b * (mont_R_inv * mont_R % n)) % n;
(==) { assert (mont_R_inv * mont_R % n = 1) }
(a * b) % n;
}
let fmont_R_inv =
let d, _ = SBML.eea_pow2_odd 256 S.prime in d % S.prime
let mul_fmont_R_and_R_inv_is_one () =
let d, k = SBML.eea_pow2_odd 256 S.prime in
SBML.mont_preconditions_d 64 4 S.prime;
assert (d * pow2 256 % S.prime = 1);
Math.Lemmas.lemma_mod_mul_distr_l d (pow2 256) S.prime
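(* Editor's note (descriptive comment added to the original code): together with the
   definition of `fmont_R_inv` above, this lemma says that `fmont_R_inv` is the
   multiplicative inverse of R = pow2 256 modulo S.prime -- exactly the shape of
   hypothesis consumed by the generic lemmas such as `lemma_to_from_mont_id_gen`. *)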
//--------------------------------------//
// bn_mont_reduction is x * fmont_R_inv //
//--------------------------------------//
val lemma_prime_mont: unit ->
Lemma (S.prime % 2 = 1 /\ S.prime < pow2 256 /\ (1 + S.prime) % pow2 64 = 0)
let lemma_prime_mont () =
assert_norm (S.prime % 2 = 1);
assert_norm (S.prime < pow2 256);
assert_norm ((1 + S.prime) % pow2 64 = 0) | {
"checked_file": "/",
"dependencies": [
"Spec.P256.Lemmas.fsti.checked",
"Spec.P256.fst.checked",
"prims.fst.checked",
"Lib.NatMod.fsti.checked",
"Lib.IntTypes.fsti.checked",
"Hacl.Spec.Montgomery.Lemmas.fst.checked",
"Hacl.Spec.Bignum.Montgomery.fsti.checked",
"Hacl.Spec.Bignum.Definitions.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": true,
"source_file": "Hacl.Spec.P256.Montgomery.fst"
} | [
{
"abbrev": true,
"full_module": "Hacl.Spec.Montgomery.Lemmas",
"short_module": "SBML"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Montgomery",
"short_module": "SBM"
},
{
"abbrev": true,
"full_module": "Hacl.Spec.Bignum.Definitions",
"short_module": "BD"
},
{
"abbrev": true,
"full_module": "Lib.Sequence",
"short_module": "LSeq"
},
{
"abbrev": true,
"full_module": "Lib.NatMod",
"short_module": "M"
},
{
"abbrev": true,
"full_module": "Spec.P256",
"short_module": "S"
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec.P256",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Spec.P256",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: Lib.Sequence.lseq Lib.IntTypes.uint64 8 -> n: Lib.Sequence.lseq Lib.IntTypes.uint64 4
-> FStar.Pervasives.Lemma
(requires
Hacl.Spec.Bignum.Definitions.bn_v n = Spec.P256.PointOps.prime /\
Hacl.Spec.Bignum.Definitions.bn_v x < Spec.P256.PointOps.prime * Spec.P256.PointOps.prime)
(ensures
Hacl.Spec.Bignum.Definitions.bn_v (Hacl.Spec.Bignum.Montgomery.bn_mont_reduction n
(Lib.IntTypes.u64 1)
x) ==
Hacl.Spec.Bignum.Definitions.bn_v x * Hacl.Spec.P256.Montgomery.fmont_R_inv %
Spec.P256.PointOps.prime) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Lib.Sequence.lseq",
"Lib.IntTypes.uint64",
"Prims.int",
"FStar.Calc.calc_finish",
"Prims.eq2",
"Prims.op_Modulus",
"FStar.Mul.op_Star",
"Hacl.Spec.Bignum.Definitions.bn_v",
"Lib.IntTypes.U64",
"Spec.P256.PointOps.prime",
"Hacl.Spec.P256.Montgomery.fmont_R_inv",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"Prims.unit",
"FStar.Calc.calc_step",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"FStar.Math.Lemmas.lemma_mod_mul_distr_r",
"Prims.squash",
"Prims._assert",
"Hacl.Spec.Montgomery.Lemmas.mont_reduction_lemma",
"Prims.nat",
"Hacl.Spec.Montgomery.Lemmas.mont_reduction",
"Hacl.Spec.Bignum.Montgomery.bn_mont_reduction_lemma",
"Lib.IntTypes.u64",
"Prims.b2t",
"Prims.op_LessThan",
"Prims.pow2",
"FStar.Pervasives.assert_norm",
"Hacl.Spec.Bignum.Definitions.lbignum",
"Hacl.Spec.Bignum.Montgomery.bn_mont_reduction",
"FStar.Pervasives.Native.tuple2",
"Hacl.Spec.Montgomery.Lemmas.eea_pow2_odd",
"Hacl.Spec.Bignum.Montgomery.bn_mont_pre",
"Hacl.Spec.P256.Montgomery.lemma_prime_mont"
] | [] | false | false | true | false | false | let bn_mont_reduction_lemma x n =
| lemma_prime_mont ();
assert (SBM.bn_mont_pre n (u64 1));
let d, _ = SBML.eea_pow2_odd 256 (BD.bn_v n) in
let res = SBM.bn_mont_reduction n (u64 1) x in
assert_norm (S.prime * S.prime < S.prime * pow2 256);
assert (BD.bn_v x < S.prime * pow2 256);
SBM.bn_mont_reduction_lemma n (u64 1) x;
assert (BD.bn_v res == SBML.mont_reduction 64 4 (BD.bn_v n) 1 (BD.bn_v x));
SBML.mont_reduction_lemma 64 4 (BD.bn_v n) 1 (BD.bn_v x);
assert (BD.bn_v res == BD.bn_v x * d % S.prime);
calc ( == ) {
BD.bn_v x * d % S.prime;
( == ) { Math.Lemmas.lemma_mod_mul_distr_r (BD.bn_v x) d S.prime }
BD.bn_v x * (d % S.prime) % S.prime;
( == ) { () }
BD.bn_v x * fmont_R_inv % S.prime;
} | false |
Vale.X64.Lemmas.fst | Vale.X64.Lemmas.lemma_eq_instr_write_outputs | val lemma_eq_instr_write_outputs
(outs: list instr_out)
(args: list instr_operand)
(vs: instr_ret_t outs)
(oprs: instr_operands_t outs args)
(s1_orig s1 s2_orig s2: machine_state)
: Lemma (requires state_eq_S true s1_orig s2_orig /\ state_eq_S true s1 s2)
(ensures
state_eq_S true
(BS.instr_write_outputs outs args vs oprs s1_orig s1)
(BS.instr_write_outputs outs args vs oprs s2_orig s2)) | val lemma_eq_instr_write_outputs
(outs: list instr_out)
(args: list instr_operand)
(vs: instr_ret_t outs)
(oprs: instr_operands_t outs args)
(s1_orig s1 s2_orig s2: machine_state)
: Lemma (requires state_eq_S true s1_orig s2_orig /\ state_eq_S true s1 s2)
(ensures
state_eq_S true
(BS.instr_write_outputs outs args vs oprs s1_orig s1)
(BS.instr_write_outputs outs args vs oprs s2_orig s2)) | let rec lemma_eq_instr_write_outputs
(outs:list instr_out) (args:list instr_operand)
(vs:instr_ret_t outs) (oprs:instr_operands_t outs args) (s1_orig s1 s2_orig s2:machine_state)
: Lemma
(requires state_eq_S true s1_orig s2_orig /\ state_eq_S true s1 s2)
(ensures
state_eq_S true
(BS.instr_write_outputs outs args vs oprs s1_orig s1)
(BS.instr_write_outputs outs args vs oprs s2_orig s2))
=
let open BS in
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
lemma_heap_ignore_ghost_machine s1_orig.BS.ms_heap s2_orig.BS.ms_heap;
allow_inversion tmaddr;
match outs with
| [] -> ()
| (_, i)::outs ->
(
let ((v:instr_val_t i), (vs:instr_ret_t outs)) =
match outs with
| [] -> (vs, ())
| _::_ -> let vs = coerce vs in (fst vs, snd vs)
in
match i with
| IOpEx i ->
let oprs = coerce oprs in
let s1 = instr_write_output_explicit i v (fst oprs) s1_orig s1 in
let s2 = instr_write_output_explicit i v (fst oprs) s2_orig s2 in
lemma_eq_instr_write_outputs outs args vs (snd oprs) s1_orig s1 s2_orig s2
| IOpIm i ->
let s1 = instr_write_output_implicit i v s1_orig s1 in
let s2 = instr_write_output_implicit i v s2_orig s2 in
allow_inversion operand64;
allow_inversion operand128;
lemma_eq_instr_write_outputs outs args vs (coerce oprs) s1_orig s1 s2_orig s2
) | {
"file_name": "vale/code/arch/x64/Vale.X64.Lemmas.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 5,
"end_line": 112,
"start_col": 0,
"start_line": 76
} | module Vale.X64.Lemmas
open FStar.Mul
open Vale.X64.Machine_s
open Vale.X64.State
open Vale.X64.StateLemmas
open Vale.X64.Instruction_s
open Vale.X64.Bytes_Code_s
module BS = Vale.X64.Machine_Semantics_s
module ME = Vale.X64.Memory
#reset-options "--initial_fuel 1 --max_fuel 1 --z3rlimit 100"
#restart-solver
let rec lemma_eq_instr_apply_eval_args
(outs:list instr_out) (args:list instr_operand)
(f:instr_args_t outs args) (oprs:instr_operands_t_args args) (s1 s2:machine_state)
: Lemma
(requires state_eq_S true s1 s2)
(ensures
BS.instr_apply_eval_args outs args f oprs s1 ==
BS.instr_apply_eval_args outs args f oprs s2)
=
let open BS in
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
match args with
| [] -> ()
| i::args ->
(
let (v, oprs) : option (instr_val_t i) & instr_operands_t_args args =
match i with
| IOpEx i -> let oprs = coerce oprs in (instr_eval_operand_explicit i (fst oprs) s1, snd oprs)
| IOpIm i -> (instr_eval_operand_implicit i s1, coerce oprs)
in
let f:arrow (instr_val_t i) (instr_args_t outs args) = coerce f in
match v with
| None -> ()
| Some v -> lemma_eq_instr_apply_eval_args outs args (f v) oprs s1 s2
)
#restart-solver
let rec lemma_eq_instr_apply_eval_inouts
(outs inouts:list instr_out) (args:list instr_operand)
(f:instr_inouts_t outs inouts args) (oprs:instr_operands_t inouts args) (s1 s2:machine_state)
: Lemma
(requires state_eq_S true s1 s2)
(ensures
BS.instr_apply_eval_inouts outs inouts args f oprs s1 ==
BS.instr_apply_eval_inouts outs inouts args f oprs s2)
=
let open BS in
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
match inouts with
| [] -> lemma_eq_instr_apply_eval_args outs args f oprs s1 s2
| (Out, i)::inouts ->
let oprs =
match i with
| IOpEx i -> snd #(instr_operand_t i) (coerce oprs)
| IOpIm i -> coerce oprs
in
lemma_eq_instr_apply_eval_inouts outs inouts args (coerce f) oprs s1 s2
| (InOut, i)::inouts ->
(
let (v, oprs) : option (instr_val_t i) & instr_operands_t inouts args =
match i with
| IOpEx i -> let oprs = coerce oprs in (instr_eval_operand_explicit i (fst oprs) s1, snd oprs)
| IOpIm i -> (instr_eval_operand_implicit i s1, coerce oprs)
in
let f:arrow (instr_val_t i) (instr_inouts_t outs inouts args) = coerce f in
match v with
| None -> ()
| Some v -> lemma_eq_instr_apply_eval_inouts outs inouts args (f v) oprs s1 s2
)
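(* Editor's note (descriptive comment, added): both recursions above peel off one
   instruction operand per call, case-splitting on Out vs InOut and on explicit
   (IOpEx) vs implicit (IOpIm) operands; the evaluations agree on the two states
   because `lemma_heap_ignore_ghost_machine` makes their heaps indistinguishable up
   to ghost state.  `lemma_eq_instr_write_outputs` follows the same pattern on the
   output side. *)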
#restart-solver | {
"checked_file": "/",
"dependencies": [
"Vale.X64.StateLemmas.fsti.checked",
"Vale.X64.State.fsti.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_Semantics_s.fst.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.Instruction_s.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Bytes_Code_s.fst.checked",
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.FunctionalExtensionality.fsti.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Lemmas.fst"
} | [
{
"abbrev": true,
"full_module": "Vale.X64.Memory",
"short_module": "ME"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Instruction_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.X64.Machine_Semantics_s",
"short_module": "BS"
},
{
"abbrev": false,
"full_module": "Vale.X64.Bytes_Code_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.StateLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapLemmas",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 2,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
outs: Prims.list Vale.X64.Instruction_s.instr_out ->
args: Prims.list Vale.X64.Instruction_s.instr_operand ->
vs: Vale.X64.Instruction_s.instr_ret_t outs ->
oprs: Vale.X64.Instruction_s.instr_operands_t outs args ->
s1_orig: Vale.X64.StateLemmas.machine_state ->
s1: Vale.X64.StateLemmas.machine_state ->
s2_orig: Vale.X64.StateLemmas.machine_state ->
s2: Vale.X64.StateLemmas.machine_state
-> FStar.Pervasives.Lemma
(requires
Vale.X64.Lemmas.state_eq_S true s1_orig s2_orig /\ Vale.X64.Lemmas.state_eq_S true s1 s2)
(ensures
Vale.X64.Lemmas.state_eq_S true
(Vale.X64.Machine_Semantics_s.instr_write_outputs outs args vs oprs s1_orig s1)
(Vale.X64.Machine_Semantics_s.instr_write_outputs outs args vs oprs s2_orig s2)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_out",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.instr_ret_t",
"Vale.X64.Instruction_s.instr_operands_t",
"Vale.X64.StateLemmas.machine_state",
"Vale.X64.Instruction_s.instr_operand_inout",
"Vale.X64.Instruction_s.instr_val_t",
"Vale.X64.Instruction_s.instr_operand_explicit",
"Vale.X64.Lemmas.lemma_eq_instr_write_outputs",
"FStar.Pervasives.Native.snd",
"Vale.X64.Instruction_s.instr_operand_t",
"Vale.X64.Machine_Semantics_s.machine_state",
"Vale.X64.Machine_Semantics_s.instr_write_output_explicit",
"FStar.Pervasives.Native.fst",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Instruction_s.coerce",
"Vale.X64.Instruction_s.instr_operand_implicit",
"Prims.unit",
"FStar.Pervasives.allow_inversion",
"Vale.X64.Machine_s.operand128",
"Vale.X64.Machine_s.operand64",
"Vale.X64.Machine_Semantics_s.instr_write_output_implicit",
"FStar.Pervasives.Native.Mktuple2",
"Vale.X64.Machine_s.tmaddr",
"Vale.Arch.HeapLemmas.lemma_heap_ignore_ghost_machine",
"Vale.X64.Machine_Semantics_s.__proj__Mkmachine_state__item__ms_heap",
"Vale.X64.StateLemmas.use_machine_state_equal",
"Prims.l_and",
"Vale.X64.Lemmas.state_eq_S",
"Prims.squash",
"Vale.X64.Machine_Semantics_s.instr_write_outputs",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [
"recursion"
] | false | false | true | false | false | let rec lemma_eq_instr_write_outputs
(outs: list instr_out)
(args: list instr_operand)
(vs: instr_ret_t outs)
(oprs: instr_operands_t outs args)
(s1_orig s1 s2_orig s2: machine_state)
: Lemma (requires state_eq_S true s1_orig s2_orig /\ state_eq_S true s1 s2)
(ensures
state_eq_S true
(BS.instr_write_outputs outs args vs oprs s1_orig s1)
(BS.instr_write_outputs outs args vs oprs s2_orig s2)) =
| let open BS in
use_machine_state_equal ();
lemma_heap_ignore_ghost_machine s1.BS.ms_heap s2.BS.ms_heap;
lemma_heap_ignore_ghost_machine s1_orig.BS.ms_heap s2_orig.BS.ms_heap;
allow_inversion tmaddr;
match outs with
| [] -> ()
| (_, i) :: outs ->
(let (v: instr_val_t i), (vs: instr_ret_t outs) =
match outs with
| [] -> (vs, ())
| _ :: _ ->
let vs = coerce vs in
(fst vs, snd vs)
in
match i with
| IOpEx i ->
let oprs = coerce oprs in
let s1 = instr_write_output_explicit i v (fst oprs) s1_orig s1 in
let s2 = instr_write_output_explicit i v (fst oprs) s2_orig s2 in
lemma_eq_instr_write_outputs outs args vs (snd oprs) s1_orig s1 s2_orig s2
| IOpIm i ->
let s1 = instr_write_output_implicit i v s1_orig s1 in
let s2 = instr_write_output_implicit i v s2_orig s2 in
allow_inversion operand64;
allow_inversion operand128;
lemma_eq_instr_write_outputs outs args vs (coerce oprs) s1_orig s1 s2_orig s2) | false |
PointStructDirectDef.fst | PointStructDirectDef.swap_struct | val swap_struct (p: ref point) (v: Ghost.erased (typeof point))
: ST (Ghost.erased (typeof point))
(p `pts_to` v)
(fun v' -> p `pts_to` v')
(requires
exists (vx: U32.t) (vy: U32.t).
struct_get_field v "x" == mk_scalar vx /\ struct_get_field v "y" == mk_scalar vy)
(ensures
fun v' ->
struct_get_field v' "x" == struct_get_field v "y" /\
struct_get_field v' "y" == struct_get_field v "x") | val swap_struct (p: ref point) (v: Ghost.erased (typeof point))
: ST (Ghost.erased (typeof point))
(p `pts_to` v)
(fun v' -> p `pts_to` v')
(requires
exists (vx: U32.t) (vy: U32.t).
struct_get_field v "x" == mk_scalar vx /\ struct_get_field v "y" == mk_scalar vy)
(ensures
fun v' ->
struct_get_field v' "x" == struct_get_field v "y" /\
struct_get_field v' "y" == struct_get_field v "x") | let swap_struct (p: ref point) (v: Ghost.erased (typeof point))
: ST (Ghost.erased (typeof point))
(p `pts_to` v)
(fun v' -> p `pts_to` v')
(requires
exists (vx vy: U32.t) . struct_get_field v "x" == mk_scalar vx /\ struct_get_field v "y" == mk_scalar vy
)
(ensures fun v' ->
struct_get_field v' "x" == struct_get_field v "y" /\
struct_get_field v' "y" == struct_get_field v "x"
)
= let px = struct_field p "x" () in
let py = struct_field p "y" () in
let x = read px in
let y = read py in
write px y;
write py x;
let _ = unstruct_field p "x" px in
let _ = unstruct_field p "y" py in
drop (has_struct_field _ _ px);
drop (has_struct_field _ _ _);
return _ | {
"file_name": "share/steel/examples/steelc/PointStructDirectDef.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 10,
"end_line": 53,
"start_col": 0,
"start_line": 32
} | module PointStructDirectDef
open Steel.ST.Util
open Steel.ST.C.Types
module U32 = FStar.UInt32
// module C = C // for _zero_for_deref
let swap (#v1 #v2: Ghost.erased U32.t) (r1 r2: ref (scalar U32.t)) : STT unit
((r1 `pts_to` mk_scalar (Ghost.reveal v1)) `star` (r2 `pts_to` mk_scalar (Ghost.reveal v2)))
(fun _ -> (r1 `pts_to` mk_scalar (Ghost.reveal v2)) `star` (r2 `pts_to` mk_scalar (Ghost.reveal v1)))
= let x1 = read r1 in
let x2 = read r2 in
write r1 x2;
write r2 x1;
return () // necessary to enable smt_fallback
noextract
inline_for_extraction
[@@ norm_field_attr]
let point_fields =
field_description_cons "x" (scalar U32.t) (
field_description_cons "y" (scalar U32.t) (
field_description_nil))
let point_t = struct_t "PointStructDirectDef.point_t" point_fields
noextract
let point : typedef point_t = struct0 _ _ _
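(* Editor's note (illustrative, added): `point_fields` describes a two-field record
   of 32-bit scalars, so the C type this models is expected to look roughly like
     typedef struct { uint32_t x; uint32_t y; } PointStructDirectDef_point_t;
   (the exact emitted name is an assumption of this note, not taken from the file). *)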
#push-options "--query_stats --fuel 0 --print_implicits" | {
"checked_file": "/",
"dependencies": [
"Steel.ST.Util.fsti.checked",
"Steel.ST.C.Types.fst.checked",
"prims.fst.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "PointStructDirectDef.fst"
} | [
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "Steel.ST.C.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Steel.ST.Util",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
p: Steel.ST.C.Types.Base.ref PointStructDirectDef.point ->
v: FStar.Ghost.erased (Steel.ST.C.Types.Base.typeof PointStructDirectDef.point)
-> Steel.ST.Effect.ST
(FStar.Ghost.erased (Steel.ST.C.Types.Base.typeof PointStructDirectDef.point)) | Steel.ST.Effect.ST | [] | [] | [
"Steel.ST.C.Types.Base.ref",
"PointStructDirectDef.point_t",
"PointStructDirectDef.point",
"FStar.Ghost.erased",
"Steel.ST.C.Types.Base.typeof",
"Steel.ST.Util.return",
"FStar.Ghost.hide",
"FStar.Set.set",
"Steel.Memory.iname",
"FStar.Set.empty",
"Steel.ST.C.Types.Base.pts_to",
"Steel.Effect.Common.vprop",
"Prims.unit",
"Steel.ST.Util.drop",
"Steel.ST.C.Types.Struct.has_struct_field",
"Steel.C.Typestring.string_cons",
"Steel.C.Typestring.cP",
"Steel.C.Typestring.co",
"Steel.C.Typestring.ci",
"Steel.C.Typestring.cn",
"Steel.C.Typestring.ct",
"Steel.C.Typestring.cS",
"Steel.C.Typestring.cr",
"Steel.C.Typestring.cu",
"Steel.C.Typestring.cc",
"Steel.C.Typestring.cD",
"Steel.C.Typestring.ce",
"Steel.C.Typestring.cf",
"Steel.C.Typestring.cdot",
"Steel.C.Typestring.cp",
"Steel.C.Typestring.c_",
"Steel.C.Typestring.string_nil",
"Steel.ST.C.Types.Fields.field_t_cons",
"Steel.C.Typestring.cx",
"Steel.ST.C.Types.Scalar.scalar_t",
"FStar.UInt32.t",
"Steel.C.Typestring.cy",
"Steel.ST.C.Types.Fields.field_t_nil",
"PointStructDirectDef.point_fields",
"Steel.ST.C.Types.Scalar.scalar",
"Steel.ST.C.Types.Struct.struct_t0",
"Steel.ST.C.Types.Struct.unstruct_field",
"Steel.ST.C.Types.Base.mk_fraction",
"Steel.ST.C.Types.Scalar.mk_scalar",
"Steel.FractionalPermission.full_perm",
"Steel.ST.C.Types.Struct.struct_set_field",
"Steel.ST.C.Types.Base.unknown",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_type",
"Steel.ST.C.Types.Fields.__proj__Mkfield_description_t__item__fd_typedef",
"FStar.Ghost.reveal",
"Steel.ST.C.Types.Scalar.write",
"Steel.ST.C.Types.Struct.struct_get_field",
"Steel.ST.C.Types.Scalar.read",
"Steel.ST.C.Types.Struct.struct_field",
"Prims.l_Exists",
"Prims.l_and",
"Prims.eq2",
"Prims.l_True"
] | [] | false | true | false | false | false | let swap_struct (p: ref point) (v: Ghost.erased (typeof point))
: ST (Ghost.erased (typeof point))
(p `pts_to` v)
(fun v' -> p `pts_to` v')
(requires
exists (vx: U32.t) (vy: U32.t).
struct_get_field v "x" == mk_scalar vx /\ struct_get_field v "y" == mk_scalar vy)
(ensures
fun v' ->
struct_get_field v' "x" == struct_get_field v "y" /\
struct_get_field v' "y" == struct_get_field v "x") =
| let px = struct_field p "x" () in
let py = struct_field p "y" () in
let x = read px in
let y = read py in
write px y;
write py x;
let _ = unstruct_field p "x" px in
let _ = unstruct_field p "y" py in
drop (has_struct_field _ _ px);
drop (has_struct_field _ _ _);
return _ | false |
MerkleTree.Low.fst | MerkleTree.Low.uint32_max | val uint32_max : FStar.UInt64.t | let uint32_max = 4294967295UL | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 29,
"end_line": 53,
"start_col": 0,
"start_line": 53
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to 64-bit alternatives.
type index_t = uint32_t
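(* Editor's note (descriptive comment, added): `index_t` is a 32-bit unsigned tree
   index; the module's bound constants are uint32_32_max / uint32_max =
   2^32 - 1 = 4294967295 and uint64_max = 2^64 - 1 = 18446744073709551615. *)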
let uint32_32_max = 4294967295ul | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.UInt64.t | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt64.__uint_to_t"
] | [] | false | false | false | true | false | let uint32_max =
| 4294967295uL | false |
|
MerkleTree.Low.fst | MerkleTree.Low.uint32_32_max | val uint32_32_max : FStar.UInt32.t | let uint32_32_max = 4294967295ul | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 32,
"end_line": 51,
"start_col": 0,
"start_line": 51
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to 64-bit alternatives.
type index_t = uint32_t | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.UInt32.t | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | true | false | let uint32_32_max =
| 4294967295ul | false |
|
MerkleTree.Low.fst | MerkleTree.Low.uint64_max | val uint64_max : FStar.UInt64.t | let uint64_max = 18446744073709551615UL | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 39,
"end_line": 54,
"start_col": 0,
"start_line": 54
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to 64-bit alternatives.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.UInt64.t | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt64.__uint_to_t"
] | [] | false | false | false | true | false | let uint64_max =
| 18446744073709551615uL | false |
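The 64-bit analogue, again as a hedged sketch assuming the same MerkleTree.Low context: uint64_max is the largest 64-bit value.

// Hypothetical check inside MerkleTree.Low.
let _ = assert_norm (U64.v uint64_max = pow2 64 - 1)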
|
MerkleTree.Low.fst | MerkleTree.Low.u32_64 | val u32_64 : a: FStar.UInt32.t -> b: FStar.UInt64.t{FStar.UInt64.v b = FStar.UInt32.v a} | let u32_64 = Int.Cast.uint32_to_uint64 | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 77,
"end_line": 58,
"start_col": 39,
"start_line": 58
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: FStar.UInt32.t -> b: FStar.UInt64.t{FStar.UInt64.v b = FStar.UInt32.v a} | Prims.Tot | [
"total"
] | [] | [
"FStar.Int.Cast.uint32_to_uint64"
] | [] | false | false | false | false | false | let u32_64 =
| Int.Cast.uint32_to_uint64 | false |
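A minimal sketch of what the verbose type above records: the widening cast u32_64 is value-preserving, which is exactly the refinement on FStar.Int.Cast.uint32_to_uint64. The lemma name is hypothetical and assumes placement inside MerkleTree.Low as quoted.

// Follows directly from the refinement b:U64.t{U64.v b = U32.v a}.
let _u32_64_exact (i:index_t) : Lemma (U64.v (u32_64 i) = U32.v i) = ()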
|
MerkleTree.Low.fst | MerkleTree.Low.offset_range_limit | val offset_range_limit : FStar.UInt64.t | let offset_range_limit = uint32_max | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 35,
"end_line": 55,
"start_col": 0,
"start_line": 55
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.UInt64.t | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.uint32_max"
] | [] | false | false | false | true | false | let offset_range_limit =
| uint32_max | false |
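For orientation (an observation, not part of the record): offset_range_limit is numerically the same bound as the 32-bit maximum, which is why two offsets that are at most offset_range_limit apart can be bridged by a 32-bit index. A hedged check, assuming the quoted module context:

// Hypothetical check inside MerkleTree.Low.
let _ = assert_norm (U64.v offset_range_limit = U32.v uint32_32_max)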
|
MerkleTree.Low.fst | MerkleTree.Low.u64_32 | val u64_32 : a: FStar.UInt64.t -> b: FStar.UInt32.t{FStar.UInt32.v b = FStar.UInt64.v a % Prims.pow2 32} | let u64_32 = Int.Cast.uint64_to_uint32 | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 77,
"end_line": 59,
"start_col": 39,
"start_line": 59
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: FStar.UInt64.t -> b: FStar.UInt32.t{FStar.UInt32.v b = FStar.UInt64.v a % Prims.pow2 32} | Prims.Tot | [
"total"
] | [] | [
"FStar.Int.Cast.uint64_to_uint32"
] | [] | false | false | false | false | false | let u64_32 =
| Int.Cast.uint64_to_uint32 | false |
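A minimal sketch of the narrowing direction: u64_32 truncates modulo pow2 32, which is the refinement carried by FStar.Int.Cast.uint64_to_uint32. The lemma name is hypothetical and the snippet assumes the MerkleTree.Low context quoted above.

// Follows directly from the refinement on uint64_to_uint32's result.
let _u64_32_mod (x:offset_t) : Lemma (U32.v (u64_32 x) = U64.v x % pow2 32) = ()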
|
MerkleTree.Low.fst | MerkleTree.Low.merkle_tree_conditions | val merkle_tree_conditions
(#hsz: Ghost.erased hash_size_t)
(offset: uint64_t)
(i j: uint32_t)
(hs: hash_vv hsz)
(rhs_ok: bool)
(rhs: hash_vec #hsz)
(mroot: hash #hsz)
: Tot bool | val merkle_tree_conditions
(#hsz: Ghost.erased hash_size_t)
(offset: uint64_t)
(i j: uint32_t)
(hs: hash_vv hsz)
(rhs_ok: bool)
(rhs: hash_vec #hsz)
(mroot: hash #hsz)
: Tot bool | let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 37,
"end_line": 110,
"start_col": 0,
"start_line": 107
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
offset: EverCrypt.Helpers.uint64_t ->
i: LowStar.Vector.uint32_t ->
j: LowStar.Vector.uint32_t ->
hs: MerkleTree.Low.Datastructures.hash_vv (FStar.Ghost.reveal hsz) ->
rhs_ok: Prims.bool ->
rhs: MerkleTree.Low.Datastructures.hash_vec ->
mroot: MerkleTree.Low.Datastructures.hash
-> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"EverCrypt.Helpers.uint64_t",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.Datastructures.hash_vv",
"FStar.Ghost.reveal",
"Prims.bool",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"Prims.op_AmpAmp",
"FStar.Integers.op_Greater_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.add64_fits",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.merkle_tree_size_lg"
] | [] | false | false | false | false | false | let merkle_tree_conditions
(#hsz: Ghost.erased hash_size_t)
(offset: uint64_t)
(i j: uint32_t)
(hs: hash_vv hsz)
(rhs_ok: bool)
(rhs: hash_vec #hsz)
(mroot: hash #hsz)
: Tot bool =
| j >= i && add64_fits offset j && V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg | false |
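The quoted context defines offsets_connect, split_offset, add64_fits and join_offset; a short sketch of how they compose. The function name is hypothetical and the snippet assumes it sits inside MerkleTree.Low (split_offset and join_offset are private to that module); the refinement returned by join_offset is exactly the precondition split_offset requires.

// Joining an index onto a tree offset yields an offset that can be split back
// into a 32-bit index.
let _offset_roundtrip (tree:offset_t) (i:index_t{add64_fits tree i}) : Tot index_t =
  split_offset tree (join_offset tree i)
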
Test.NoHeap.fst | Test.NoHeap.test_hkdf | val test_hkdf: vs:L.lbuffer hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_hkdf: vs:L.lbuffer hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_hkdf = test_many !$"HKDF" test_one_hkdf | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 48,
"end_line": 178,
"start_col": 0,
"start_line": 178
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | vs: Test.Lowstarize.lbuffer Test.NoHeap.hkdf_vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.NoHeap.test_many",
"Test.NoHeap.hkdf_vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_hkdf"
] | [] | false | true | false | false | false | let test_hkdf =
| test_many !$"HKDF" test_one_hkdf | false |
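As a usage note (hedged, not part of the record): the val above fixes test_hkdf's signature, so a caller simply passes a Test.Lowstarize.lbuffer of hkdf_vector values in the Stack effect. A trivial wrapper with the same signature, assuming the Test.NoHeap context quoted above; the wrapper name is hypothetical.

// Hypothetical wrapper; identical signature to the val quoted above.
let run_hkdf_tests (vs:L.lbuffer hkdf_vector) : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
  test_hkdf vs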
Test.NoHeap.fst | Test.NoHeap.test_hmac | val test_hmac: vs:L.lbuffer hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_hmac: vs:L.lbuffer hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_hmac = test_many !$"HMAC" test_one_hmac | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 48,
"end_line": 129,
"start_col": 0,
"start_line": 129
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | vs: Test.Lowstarize.lbuffer Test.NoHeap.hmac_vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.NoHeap.test_many",
"Test.NoHeap.hmac_vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_hmac"
] | [] | false | true | false | false | false | let test_hmac =
| test_many !$"HMAC" test_one_hmac | false |
MerkleTree.Low.fst | MerkleTree.Low.merkle_tree_size_lg | val merkle_tree_size_lg: uint32_t | val merkle_tree_size_lg: uint32_t | let merkle_tree_size_lg = 32ul | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 30,
"end_line": 78,
"start_col": 0,
"start_line": 78
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | LowStar.Vector.uint32_t | Prims.Tot | [
"total"
] | [] | [
"FStar.UInt32.__uint_to_t"
] | [] | false | false | false | true | false | let merkle_tree_size_lg =
| 32ul | false
Test.NoHeap.fst | Test.NoHeap.test_hash | val test_hash: vs:L.lbuffer hash_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_hash: vs:L.lbuffer hash_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_hash = test_many !$"Hashes" test_one_hash | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 50,
"end_line": 92,
"start_col": 0,
"start_line": 92
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | vs: Test.Lowstarize.lbuffer Test.NoHeap.hash_vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.NoHeap.test_many",
"Test.NoHeap.hash_vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_hash"
] | [] | false | true | false | false | false | let test_hash =
| test_many !$"Hashes" test_one_hash | false |
Test.NoHeap.fst | Test.NoHeap.failwith | val failwith : _: Prims.string -> FStar.HyperStack.ST.Stack _ | let failwith = LowStar.Failure.failwith | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 39,
"end_line": 41,
"start_col": 0,
"start_line": 41
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$)) | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.string -> FStar.HyperStack.ST.Stack _ | FStar.HyperStack.ST.Stack | [] | [] | [
"LowStar.Failure.failwith",
"Prims.string",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_True",
"Prims.l_False"
] | [] | false | true | false | false | false | let failwith =
| LowStar.Failure.failwith | false
MerkleTree.Low.fst | MerkleTree.Low.mt_not_full_nst | val mt_not_full_nst: mtv:merkle_tree -> Tot bool | val mt_not_full_nst: mtv:merkle_tree -> Tot bool | let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 51,
"end_line": 117,
"start_col": 0,
"start_line": 117
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtv: MerkleTree.Low.merkle_tree -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.uint32_32_max",
"Prims.bool"
] | [] | false | false | false | true | false | let mt_not_full_nst mtv =
| MT?.j mtv < uint32_32_max | false |
Test.NoHeap.fst | Test.NoHeap.test_chacha20 | val test_chacha20: L.lbuffer chacha20_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_chacha20: L.lbuffer chacha20_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20 | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 60,
"end_line": 210,
"start_col": 0,
"start_line": 210
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Test.Lowstarize.lbuffer Test.NoHeap.chacha20_vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.NoHeap.test_many",
"Test.NoHeap.chacha20_vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_chacha20"
] | [] | false | true | false | false | false | let test_chacha20 =
| test_many !$"CHACHA20" test_one_chacha20 | false
MerkleTree.Low.fst | MerkleTree.Low.mt_loc | val mt_loc: mt_p -> GTot loc | val mt_loc: mt_p -> GTot loc | let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 59,
"end_line": 232,
"start_col": 0,
"start_line": 232
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree. | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mt: MerkleTree.Low.mt_p -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let mt_loc mt =
| B.loc_all_regions_from false (B.frameOf mt) | false |
MerkleTree.Low.fst | MerkleTree.Low.offsets_connect | val offsets_connect (x y: offset_t) : Tot bool | val offsets_connect (x y: offset_t) : Tot bool | let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 97,
"end_line": 62,
"start_col": 0,
"start_line": 62
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32 | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: MerkleTree.Low.offset_t -> y: MerkleTree.Low.offset_t -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.offset_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Greater_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W64",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.offset_range_limit",
"Prims.bool"
] | [] | false | false | false | true | false | let offsets_connect (x y: offset_t) : Tot bool =
| y >= x && (y - x) <= offset_range_limit | false |
MerkleTree.Low.fst | MerkleTree.Low.add64_fits | val add64_fits (x: offset_t) (i: index_t) : Tot bool | val add64_fits (x: offset_t) (i: index_t) : Tot bool | let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 80,
"end_line": 71,
"start_col": 0,
"start_line": 71
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | x: MerkleTree.Low.offset_t -> i: MerkleTree.Low.index_t -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.offset_t",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W64",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.uint64_max",
"MerkleTree.Low.u32_64",
"Prims.bool"
] | [] | false | false | false | true | false | let add64_fits (x: offset_t) (i: index_t) : Tot bool =
| uint64_max - x >= (u32_64 i) | false |
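The completed definition above reduces `add64_fits` to a single comparison: adding the 32-bit index `i` to the 64-bit offset `x` is safe exactly when `uint64_max - x >= i`. A minimal Python sketch of that overflow guard (the constant and function name simply mirror the F* identifiers; this is an illustration, not part of the verified development):

UINT64_MAX = 18446744073709551615   # 2**64 - 1, mirrors `uint64_max`

def add64_fits(offset: int, index: int) -> bool:
    # True iff offset + index still fits in 64 bits,
    # i.e. uint64_max - offset >= index (index is a 32-bit value).
    return UINT64_MAX - offset >= index

assert add64_fits(UINT64_MAX - 5, 5)
assert not add64_fits(UINT64_MAX - 5, 6)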
MerkleTree.Low.fst | MerkleTree.Low.mt_not_full | val mt_not_full: HS.mem -> mt_p -> GTot bool | val mt_not_full: HS.mem -> mt_p -> GTot bool | let mt_not_full h mt = mt_not_full_nst (B.get h mt 0) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 53,
"end_line": 120,
"start_col": 0,
"start_line": 120
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> mt: MerkleTree.Low.mt_p -> Prims.GTot Prims.bool | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_p",
"MerkleTree.Low.mt_not_full_nst",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.bool"
] | [] | false | false | false | false | false | let mt_not_full h mt =
| mt_not_full_nst (B.get h mt 0) | false |
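`mt_not_full` dereferences the tree pointer and applies `mt_not_full_nst`, so a tree can keep accepting insertions only while its upper index `j` is strictly below the 32-bit constant `uint32_32_max` (note: the 32-bit constant, not the 64-bit `uint32_max` used for offsets). A toy Python rendering of the check, assuming `j` is passed in directly:

UINT32_32_MAX = 4294967295   # 2**32 - 1, mirrors `uint32_32_max`

def mt_not_full_nst(j: int) -> bool:
    # Room for another insertion only while the upper index j can still grow.
    return j < UINT32_32_MAX

assert mt_not_full_nst(0)
assert not mt_not_full_nst(UINT32_32_MAX)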
MerkleTree.Low.fst | MerkleTree.Low.split_offset | val split_offset (tree: offset_t) (index: offset_t{offsets_connect tree index}) : Tot index_t | val split_offset (tree: offset_t) (index: offset_t{offsets_connect tree index}) : Tot index_t | let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 32,
"end_line": 68,
"start_col": 0,
"start_line": 65
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
tree: MerkleTree.Low.offset_t ->
index: MerkleTree.Low.offset_t{MerkleTree.Low.offsets_connect tree index}
-> MerkleTree.Low.index_t | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.offset_t",
"Prims.b2t",
"MerkleTree.Low.offsets_connect",
"FStar.Int.Cast.uint64_to_uint32",
"Prims.unit",
"Prims._assert",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W64",
"MerkleTree.Low.offset_range_limit",
"FStar.UInt64.t",
"FStar.UInt64.sub_mod",
"MerkleTree.Low.index_t"
] | [] | false | false | false | false | false | let split_offset (tree: offset_t) (index: offset_t{offsets_connect tree index}) : Tot index_t =
| [@@ inline_let ]let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff | false |
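`split_offset` recovers the 32-bit in-tree index from a global 64-bit offset: the refinement `offsets_connect tree index` guarantees that `index - tree` fits in 32 bits, so the narrowing cast loses nothing. A small Python sketch of the same arithmetic (names mirror the F* ones; purely illustrative):

OFFSET_RANGE_LIMIT = 4294967295   # uint32 bound, mirrors `offset_range_limit`

def offsets_connect(tree: int, index: int) -> bool:
    return index >= tree and index - tree <= OFFSET_RANGE_LIMIT

def split_offset(tree: int, index: int) -> int:
    assert offsets_connect(tree, index)   # precondition from the F* refinement
    return (index - tree) & 0xFFFFFFFF    # uint64 -> uint32 cast; lossless under the precondition

assert split_offset(1000, 1042) == 42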
MerkleTree.Low.fst | MerkleTree.Low.join_offset | val join_offset (tree: offset_t) (i: index_t{add64_fits tree i})
: Tot (r: offset_t{offsets_connect tree r}) | val join_offset (tree: offset_t) (i: index_t{add64_fits tree i})
: Tot (r: offset_t{offsets_connect tree r}) | let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 25,
"end_line": 75,
"start_col": 0,
"start_line": 74
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | tree: MerkleTree.Low.offset_t -> i: MerkleTree.Low.index_t{MerkleTree.Low.add64_fits tree i}
-> r: MerkleTree.Low.offset_t{MerkleTree.Low.offsets_connect tree r} | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.offset_t",
"MerkleTree.Low.index_t",
"Prims.b2t",
"MerkleTree.Low.add64_fits",
"FStar.UInt64.add",
"MerkleTree.Low.u32_64",
"MerkleTree.Low.offsets_connect"
] | [] | false | false | false | false | false | let join_offset (tree: offset_t) (i: index_t{add64_fits tree i})
: Tot (r: offset_t{offsets_connect tree r}) =
| U64.add tree (u32_64 i) | false |
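`join_offset` goes the other way: it widens the 32-bit index and adds it to the tree's base offset, with `add64_fits` as the precondition ruling out 64-bit overflow, and the refinement on the result recording that `split_offset` can undo it. A matching Python sketch (illustrative only):

UINT64_MAX = 18446744073709551615

def join_offset(tree: int, i: int) -> int:
    assert UINT64_MAX - tree >= i   # add64_fits precondition
    return tree + i                 # no uint64 overflow thanks to the assertion

# Round-tripping with split_offset from the previous sketch recovers i:
assert join_offset(1000, 42) == 1042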
MerkleTree.Low.fst | MerkleTree.Low.mt_safe | val mt_safe: HS.mem -> mt_p -> GTot Type0 | val mt_safe: HS.mem -> mt_p -> GTot Type0 | let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv))) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 68,
"end_line": 227,
"start_col": 0,
"start_line": 213
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> mt: MerkleTree.Low.mt_p -> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_p",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.RVector.rv_inv",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__rhs",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.__proj__MT__item__mroot",
"MerkleTree.Low.mt_safe_elts",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.__proj__MT__item__j",
"Prims.b2t",
"FStar.Monotonic.HyperHeap.extends",
"LowStar.Vector.frameOf",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"FStar.Monotonic.HyperHeap.disjoint",
"LowStar.Monotonic.Buffer.get"
] | [] | false | false | false | false | true | let mt_safe h mt =
| B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
RV.rv_inv h (MT?.hs mtv) /\ RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv))) | false |
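`mt_safe` groups three kinds of conditions: liveness and vector invariants, a region discipline (the regions of `hs`, `rhs` and `mroot` each extend the tree's own region and are pairwise disjoint), and index safety via `mt_safe_elts`. The Python fragment below only approximates the disjointness part of that discipline by requiring three distinct region ids; it is a loose illustration, not a model of HyperHeap semantics, and the `HH.extends` conjuncts are not modelled at all:

def sub_regions_distinct(hs_rid, rhs_rid, mroot_rid) -> bool:
    # Approximates the HH.disjoint conjuncts: `hs`, `rhs` and `mroot`
    # must live in three pairwise-distinct regions.
    return len({hs_rid, rhs_rid, mroot_rid}) == 3

assert sub_regions_distinct("hs", "rhs", "mroot")
assert not sub_regions_distinct("hs", "hs", "mroot")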
MerkleTree.Low.fst | MerkleTree.Low.mt_safe_elts | val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv)) | val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv)) | let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 61,
"end_line": 141,
"start_col": 0,
"start_line": 137
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{j >= i}
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"Prims.bool",
"Prims.l_and",
"Prims.eq2",
"FStar.UInt32.t",
"MerkleTree.Low.Datastructures.hash",
"LowStar.Vector.get",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.mt_safe_elts",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Slash",
"MerkleTree.Low.offset_of"
] | [
"recursion"
] | false | false | false | false | true | let rec mt_safe_elts #hsz h lv hs i j =
| if lv = merkle_tree_size_lg
then true
else
(let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\ mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)) | false |
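`mt_safe_elts` checks one level at a time: the vector at level `lv` must hold exactly `j - offset_of i` hashes, and the same must hold recursively for the parent level with both indices halved, up to the fixed height of 32. A small Python model over plain lists (list lengths stand in for `V.size_of`; this sketches only the counting structure, not the memory-safety content):

MERKLE_TREE_SIZE_LG = 32

def offset_of(i: int) -> int:
    return i if i % 2 == 0 else i - 1

def mt_safe_elts(hs, lv: int, i: int, j: int) -> bool:
    # hs is a list of 32 per-level lists; level lv must hold j - offset_of(i)
    # elements, and the same must hold recursively for the parent level.
    if lv == MERKLE_TREE_SIZE_LG:
        return True
    return (len(hs[lv]) == j - offset_of(i)
            and mt_safe_elts(hs, lv + 1, i // 2, j // 2))

# An empty tree (all levels empty, i = j = 0) satisfies the predicate:
assert mt_safe_elts([[] for _ in range(32)], 0, 0, 0)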
MerkleTree.Low.fst | MerkleTree.Low.offset_of | val offset_of: i:index_t -> Tot index_t | val offset_of: i:index_t -> Tot index_t | let offset_of i = if i % 2ul = 0ul then i else i - 1ul | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 54,
"end_line": 125,
"start_col": 0,
"start_line": 125
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
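// Added note (illustration only): uint32_32_max = 2^32 - 1 = 4,294,967,295, and
// `mt_not_full_nst` accepts an insertion only while `MT?.j mtv < uint32_32_max`,
// so the last accepted insertion raises `j` to 2^32 - 1; the tree therefore holds
// at most 4,294,967,295 leaves, matching the comment above.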
/// (Memory) Safety | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | i: MerkleTree.Low.index_t -> MerkleTree.Low.index_t | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.index_t",
"Prims.op_Equality",
"FStar.UInt32.t",
"FStar.Integers.op_Percent",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"FStar.UInt32.__uint_to_t",
"Prims.bool",
"FStar.Integers.op_Subtraction"
] | [] | false | false | false | true | false | let offset_of i =
| if i % 2ul = 0ul then i else i - 1ul | false |
Test.NoHeap.fst | Test.NoHeap.string_of_alg | val string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t | val string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t | let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256" | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 28,
"end_line": 29,
"start_col": 0,
"start_line": 12
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Spec.Hash.Definitions.hash_alg -> C.String.t | Prims.Tot | [
"total"
] | [] | [
"Spec.Hash.Definitions.hash_alg",
"C.String.op_Bang_Dollar",
"C.String.t"
] | [] | false | false | false | true | false | let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
| let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256" | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_lift | val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r}) | val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r}) | let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 35,
"end_line": 299,
"start_col": 0,
"start_line": 298
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
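// Worked example (added for illustration): `offset_of` rounds an index down to the
// nearest even number, e.g. offset_of 4ul = 4ul and offset_of 5ul = 4ul; below,
// `j - offset_of i` counts how many hashes are still kept at a given level.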
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
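// Worked example (added for illustration): for i = 0ul and j = 3ul the predicate
// unfolds to |hs[0]| = 3, |hs[1]| = 1 and |hs[lv]| = 0 for lv >= 2, since the
// recursion halves the indices (j goes 3ul -> 1ul -> 0ul) and offset_of 0ul = 0ul
// at every level.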
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> mt: MerkleTree.Low.mt_p{MerkleTree.Low.mt_safe h mt}
-> Prims.GTot (r: MerkleTree.New.High.merkle_tree{MerkleTree.New.High.mt_wf_elts r}) | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_p",
"MerkleTree.Low.mt_safe",
"MerkleTree.Low.merkle_tree_lift",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.New.High.merkle_tree",
"FStar.UInt32.v",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.New.High.mt_wf_elts"
] | [] | false | false | false | false | false | let mt_lift h mt =
| merkle_tree_lift h (B.get h mt 0) | false |
MerkleTree.Low.fst | MerkleTree.Low.path_loc | val path_loc: path_p -> GTot loc | val path_loc: path_p -> GTot loc | let path_loc p = B.loc_all_regions_from false (B.frameOf p) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 59,
"end_line": 1080,
"start_col": 0,
"start_line": 1080
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
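// Added walkthrough (illustration only): inserting h3 into the three-leaf tree
// above starts with j = 3ul at level 0, where h3 is pushed onto hs[0]. Since
// 3 % 2 = 1, the accumulator becomes h23 = hash h2 h3 before recursing to level 1
// with j = 1ul; there h23 is pushed onto hs[1], 1 % 2 = 1 again turns the
// accumulator into h03 = hash h01 h23, and level 2 finally receives h03, giving
// exactly the "AFTER INSERTION" column.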
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
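// Informal reading note for `insert_` above: once the new hash has been appended to
// `hs[lv]` (between `hh0` and `hh1`), the odd-`j` branch folds the previously last
// element of `hs[lv]` into `acc` with `hash_fun` and recurses one level up (steps 3
// and 4), the even-`j` branch stops there, and step 5 stitches the `modifies`,
// `mt_safe_elts` and `MTH.insert_` facts back together for the caller.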
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
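// A hypothetical usage sketch (names not from this file): after `mt_create_custom`,
// each new leaf is pushed with `mt_insert hsz mt leaf`; since `leaf` doubles as the
// folding accumulator, its contents are clobbered by the call and must be rewritten
// before the next insertion.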
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
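// (As `mt_create_custom` below shows, the tree is first allocated empty in region `r`
// and the initial element is then added with a regular `mt_insert`, so a fresh tree
// holds exactly one leaf.)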
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
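// Concretely, `path_safe` below asks every hash reachable from `phashes` to be live and
// to sit in a region included in the tree region `mtr`, while the path's own vector lives
// in a region extending `B.frameOf p`, which itself is disjoint from `mtr`.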
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: MerkleTree.Low.path_p -> Prims.GTot LowStar.Monotonic.Buffer.loc | Prims.GTot | [
"sometrivial"
] | [] | [
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.loc"
] | [] | false | false | false | false | false | let path_loc p =
| B.loc_all_regions_from false (B.frameOf p) | false |
Test.NoHeap.fst | Test.NoHeap.test_poly1305 | val test_poly1305 (_: unit) : Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_poly1305 (_: unit) : Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"poly1305" test_one_poly1305 Test.Vectors.Poly1305.(LB vectors_len vectors) | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 89,
"end_line": 244,
"start_col": 0,
"start_line": 243
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
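// Each hash vector also carries a `repeat` count: `test_one_hash` below concatenates the
// input string `repeat` times into a stack buffer (the `C.Loops.for` loop) before hashing
// it with `EverCrypt.Hash.Incremental.hash` and comparing the result against `expected`.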
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
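// `test_many` is not defined in this part of the file (it comes from the module's
// interface); presumably it runs the given per-vector test over every entry of the
// length-tagged `LB` array, under the given C.String label.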
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
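// The two asserts above discharge the max-input-length side of the spec predicate;
// the final bound (key length plus one hash block still fits in 32 bits) presumably
// covers whatever remains of `Spec.Agile.HMAC.keysized`, as promised by the refinement
// on the result.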
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end
let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20
/// Using generated vectors in the vectors/ directory
/// =================================================
/// Poly1305
/// --------
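// These generated vectors live in top-level (recallable) buffers, hence the repeated
// `B.recall` calls below; the `4294967295ul - 16ul >= input_len` guard presumably keeps
// the tag/length arithmetic required by `EverCrypt.Poly1305.mac` within 32 bits.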
let test_one_poly1305 (v: Test.Vectors.Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not (4294967295ul `U32.sub` 16ul `U32.gte` input_len)
then
failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else begin
B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then
EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then
TestLib.compare_and_print !$"Poly1305" tag dst 16ul
end;
pop_frame () | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Prims.unit",
"Test.NoHeap.test_many",
"Test.Vectors.Poly1305.vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_poly1305",
"Test.Lowstarize.LB",
"Test.Vectors.Poly1305.vectors_len",
"Test.Vectors.Poly1305.vectors",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_True"
] | [] | false | true | false | false | false | let test_poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
| test_many !$"poly1305" test_one_poly1305 Test.Vectors.Poly1305.(LB vectors_len vectors) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_safe_elts_init | val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv)) | val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv)) | let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 45,
"end_line": 182,
"start_col": 0,
"start_line": 180
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
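// e.g. offset_of 6ul = 6ul and offset_of 7ul = 6ul: each level keeps its hashes starting
// from the last even index, which is why sizes below are measured as `j - offset_of i`.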
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
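// An illustrative instance: with i = 0ul and j = 5ul this forces V.size_of (V.get h hs 0)
// = 5, then 2 at level 1, 1 at level 2, and 0 at every higher level.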
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg}
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_ h
hs
lv
(LowStar.Vector.size_of hs)
(fun hv -> LowStar.Vector.size_of hv = 0ul))
(ensures MerkleTree.Low.mt_safe_elts h lv hs 0ul 0ul)
(decreases 32 - FStar.UInt32.v lv) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"Prims.bool",
"MerkleTree.Low.mt_safe_elts_init",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"Prims.unit"
] | [
"recursion"
] | false | false | true | false | false | let rec mt_safe_elts_init #hsz h lv hs =
| if lv = merkle_tree_size_lg then () else mt_safe_elts_init #hsz h (lv + 1ul) hs | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_preserved | val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt)) | val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt)) | let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1 | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 60,
"end_line": 321,
"start_col": 0,
"start_line": 308
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
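// Since `mt_safe` makes the regions of `hs`, `rhs` and `mroot` extend `B.frameOf mt`,
// `mt_loc mt` covers the tree struct together with all of its sub-structures; this is
// the `loc_includes` fact used repeatedly in `mt_safe_preserved` and `mt_preserved`.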
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
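// The same recipe (derive the needed `loc_includes` facts from `mt_loc`, then invoke the
// per-component preservation lemmas) is reused by `mt_preserved` further below.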
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
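// `mt_lift` is the abstraction function the stateful correctness clauses are phrased
// against; e.g. `mt_preserved` right below concludes `mt_lift h0 mt == mt_lift h1 mt`.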
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1; | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mt: MerkleTree.Low.mt_p ->
p: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.mt_safe h0 mt /\
LowStar.Monotonic.Buffer.loc_disjoint p (MerkleTree.Low.mt_loc mt) /\
LowStar.Monotonic.Buffer.modifies p h0 h1)
(ensures
(MerkleTree.Low.mt_safe_preserved mt p h0 h1;
MerkleTree.Low.mt_lift h0 mt == MerkleTree.Low.mt_lift h1 mt)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.modifies_buffer_elim",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"Prims.unit",
"LowStar.RVector.as_seq_preserved",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.loc_buffer",
"LowStar.RVector.loc_rvector",
"Prims.eq2"
] | [] | true | false | true | false | false | let mt_preserved mt p h0 h1 =
| assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt)) (B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1 | false |
MerkleTree.Low.fst | MerkleTree.Low.loc_union_assoc_4 | val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d)) | val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d)) | let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
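// Informal proof plan: the call above and the next one rewrite the left-hand side to
// (a `loc_union` (b `loc_union` c)) `loc_union` d; the last two do the same for the
// right-hand side up to commutativity of `loc_union`, which no explicit lemma call
// supplies here, so it is presumably available to the solver automatically.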
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 37,
"end_line": 598,
"start_col": 0,
"start_line": 594
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
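// For intuition, a small worked example: with a tree offset of 100UL and a
// local index 5ul, `add64_fits 100UL 5ul` holds, so `join_offset 100UL 5ul`
// yields the global offset 105UL; conversely `offsets_connect 100UL 105UL`
// holds and `split_offset 100UL 105UL` recovers 5ul, since the difference 5
// fits within `offset_range_limit`.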
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
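// For example, `offset_of 6ul` and `offset_of 7ul` are both 6ul: the function
// rounds an index down to the nearest even number, i.e. the left edge of a
// sibling pair.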
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
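// For intuition, unfolding the definition at lv = 0ul with i = 0ul and j = 5ul:
// hs[0] must hold exactly 5 hashes, hs[1] must hold 2 (since 5ul / 2ul = 2ul),
// hs[2] must hold 1, and hs[3] .. hs[31] must all be empty.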
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
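// For intuition: for a 4-element represented sequence `s` updated at index 1,
// the lemma above states that `S.upd s 1 v` equals
// `S.append (S.slice s 0 1) (S.cons v (S.slice s 2 4))`.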
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
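// Concretely: for an even insertion index such as j = 4ul,
// `insert_index_helper_even` gives 4ul / 2ul = (4ul + 1ul) / 2ul = 2ul, so the
// expected number of hashes one level up is unchanged; for an odd index such
// as j = 5ul, `insert_index_helper_odd` gives (5ul + 1ul) / 2ul = 3ul =
// 5ul / 2ul + 1ul, so one more hash is expected at the next level.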
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) == | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
a: LowStar.Monotonic.Buffer.loc ->
b: LowStar.Monotonic.Buffer.loc ->
c: LowStar.Monotonic.Buffer.loc ->
d: LowStar.Monotonic.Buffer.loc
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union a b)
(LowStar.Monotonic.Buffer.loc_union c d) ==
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union a c)
(LowStar.Monotonic.Buffer.loc_union b d)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_union_assoc",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.unit"
] | [] | true | false | true | false | false | let loc_union_assoc_4 a b c d =
| loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_safe_preserved | val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt)) | val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt)) | let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1 | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 73,
"end_line": 253,
"start_col": 0,
"start_line": 241
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\ | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mt: MerkleTree.Low.mt_p ->
p: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.mt_safe h0 mt /\
LowStar.Monotonic.Buffer.loc_disjoint p (MerkleTree.Low.mt_loc mt) /\
LowStar.Monotonic.Buffer.modifies p h0 h1)
(ensures
LowStar.Monotonic.Buffer.get h0 mt 0 == LowStar.Monotonic.Buffer.get h1 mt 0 /\
MerkleTree.Low.mt_safe h1 mt) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_safe_elts_preserved",
"MerkleTree.Low.__proj__MT__item__hash_size",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.__proj__MT__item__j",
"Prims.unit",
"LowStar.Vector.loc_vector_within_included",
"MerkleTree.Low.Datastructures.hash_vec",
"LowStar.Vector.size_of",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.RVector.rv_inv_preserved",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hvreg",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.mt_loc",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.loc_vector",
"LowStar.RVector.loc_rvector",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.get",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | true | false | true | false | false | let mt_safe_preserved mt p h0 h1 =
| assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1 | false |
MerkleTree.Low.fst | MerkleTree.Low.insert_modifies_union_loc_weakening | val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1)) | val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1)) | let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 63,
"end_line": 669,
"start_col": 0,
"start_line": 667
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure,
// see `insert_` and `mt_insert`.
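// At a glance, the function proceeds in two steps (the `1)` / `2)` markers in
// the body): `RV.insert_copy` first builds a copy of `hs[lv]` extended with
// `v`, and `RV.assign` then writes that vector back into slot `lv`. At the
// spec level this is a `S.snoc` on `hs[lv]`, i.e. `MTH.hashess_insert`
// (see the postcondition).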
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
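// Worked example (illustrative): for an even index such as j = 6ul, integer
// division gives j / 2ul = 3ul and (j + 1ul) / 2ul = 7ul / 2ul = 3ul, so the
// two sides of the conclusion coincide; truncating division absorbs the +1 on
// an even argument.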
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
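// Worked example (illustrative): for an odd index such as j = 5ul,
// (j + 1ul) / 2ul = 3ul while j / 2ul = 2ul, so the next level grows by
// exactly one slot; intuitively, this is the case in which an insertion
// propagates one level up.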
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
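// This is an interchange (middle-exchange) law for `loc_union`: the operands
// `b` and `c` trade places across the outer union. `insert_modifies_rec_helper`
// below instantiates it to pair the element footprint at level `lv` with the
// element footprints of the higher levels, and the slot footprint at `lv`
// with the slots above it.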
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
l1: LowStar.Monotonic.Buffer.loc ->
l2: LowStar.Monotonic.Buffer.loc ->
l3: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.modifies l1 h0 h1)
(ensures
LowStar.Monotonic.Buffer.modifies (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union
l1
l2)
l3)
h0
h1) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.loc_includes_union_l",
"LowStar.Monotonic.Buffer.loc_union",
"Prims.unit"
] | [] | true | false | true | false | false | let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
| B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_safe_elts_preserved | val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)] | val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)] | let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 77,
"end_line": 205,
"start_col": 0,
"start_line": 202
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
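// Illustrative example: with i = 0 and j = 5, `hs[0]` holds the 5 leaf
// hashes, `hs[1]` holds 2 hashes, `hs[2]` holds 1, and all higher levels are
// empty (the exact size invariant is `mt_safe_elts` below); `rhs` only
// carries meaningful data once a root or path computation has filled it and
// `rhs_ok` is set.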
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
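// In other words, `offset_of` rounds an index down to the nearest even value:
// offset_of 4ul = 4ul and offset_of 5ul = 4ul. `mt_safe_elts` below uses it
// to state that level `lv` stores exactly `j - offset_of i` hashes.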
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
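// Worked example (illustrative): for i = 2ul and j = 5ul, this level must
// hold 5ul - offset_of 2ul = 3ul hashes; the recursive call then checks the
// next level with i = 1ul and j = 2ul (2ul - offset_of 1ul = 2ul hashes), and
// so on up to level 32.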
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{j >= i} ->
p: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.live h0 hs /\ MerkleTree.Low.mt_safe_elts h0 lv hs i j /\
LowStar.Monotonic.Buffer.loc_disjoint p
(LowStar.Vector.loc_vector_within hs lv (LowStar.Vector.size_of hs)) /\
LowStar.Monotonic.Buffer.modifies p h0 h1)
(ensures MerkleTree.Low.mt_safe_elts h1 lv hs i j)
(decreases 32 - FStar.UInt32.v lv)
[
SMTPat (LowStar.Vector.live h0 hs);
SMTPat (MerkleTree.Low.mt_safe_elts h0 lv hs i j);
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint p (LowStar.RVector.loc_rvector hs));
SMTPat (LowStar.Monotonic.Buffer.modifies p h0 h1)
] | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.bool",
"MerkleTree.Low.mt_safe_elts_preserved",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Slash",
"Prims.unit",
"LowStar.Vector.get_preserved"
] | [
"recursion"
] | false | false | true | false | false | let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
| if lv = merkle_tree_size_lg
then ()
else
(V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1) | false |
Test.NoHeap.fst | Test.NoHeap.test_chacha20poly1305 | val test_chacha20poly1305 (_: unit) : Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_chacha20poly1305 (_: unit) : Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_chacha20poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"chacha20poly1305" test_one_chacha20poly1305 Test.Vectors.Chacha20Poly1305.(LB vectors_len vectors) | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 113,
"end_line": 315,
"start_col": 0,
"start_line": 314
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end
let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20
/// Using generated vectors in the vectors/ directory
/// =================================================
/// Poly1305
/// --------
let test_one_poly1305 (v: Test.Vectors.Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not (4294967295ul `U32.sub` 16ul `U32.gte` input_len)
then
failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else begin
B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then
EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then
TestLib.compare_and_print !$"Poly1305" tag dst 16ul
end;
pop_frame ()
let test_poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"poly1305" test_one_poly1305 Test.Vectors.Poly1305.(LB vectors_len vectors)
/// Curve25519
/// ----------
let test_one_curve25519 (v: Test.Vectors.Curve25519.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Curve25519 in
let Vector result result_len public public_len private_ private__len valid = v in
push_frame ();
B.recall result;
B.recall public;
B.recall private_;
let h0 = get () in
let dst = B.alloca 0uy 32ul in
let h1 = get () in
B.recall result;
B.recall public;
B.recall private_;
if public_len = 32ul && private__len = 32ul then
EverCrypt.Curve25519.scalarmult dst private_ public;
B.recall result;
if result_len = 32ul && valid then
TestLib.compare_and_print !$"Curve25519" result dst 32ul;
pop_frame ()
let test_curve25519 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"curve25519" test_one_curve25519 Test.Vectors.Curve25519.(LB vectors_len vectors)
/// Chacha20-Poly1305
/// -----------------
#push-options "--z3rlimit 32"
let test_one_chacha20poly1305 (v: Test.Vectors.Chacha20Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let Test.Vectors.Chacha20Poly1305.Vector cipher_and_tag cipher_and_tag_len plain plain_len aad aad_len nonce nonce_len key key_len = v in
if not (key_len = 32ul)
then failwith "chacha20poly1305: not (key_len = 32ul)"
else if not (nonce_len = 12ul)
then failwith "chacha20poly1305: not (nonce_len = 12ul)"
else if not ((4294967295ul `U32.sub` 16ul) `U32.gte` plain_len)
then failwith "chacha20poly1305: not ((4294967295ul `U32.sub` 16ul) `U32.gte` plain_len)"
else if not ((plain_len `U32.div` 64ul) `U32.lte` (4294967295ul `U32.sub` aad_len))
then failwith "chacha20poly1305: not ((plain_len `U32.div` 64ul) `U32.lte` (4294967295ul `U32.sub` aad_len))"
else if not (cipher_and_tag_len = plain_len `U32.add` 16ul)
then failwith "chacha20poly1305: not (cipher_and_tag_len = plain_len `U32.add` 16ul)"
else begin
B.recall plain;
B.recall cipher_and_tag;
B.recall aad;
B.recall nonce;
B.recall key;
push_frame ();
let tmp = B.alloca 0uy (plain_len `U32.add` 16ul) in
let tmp_msg' = B.sub tmp 0ul plain_len in
let tag' = B.sub tmp plain_len 16ul in
EverCrypt.Chacha20Poly1305.aead_encrypt key nonce aad_len aad plain_len plain tmp_msg' tag';
TestLib.compare_and_print !$"chacha20poly1305 cipher and tag" cipher_and_tag tmp cipher_and_tag_len;
let cipher = B.sub cipher_and_tag 0ul plain_len in
let tag = B.sub cipher_and_tag plain_len 16ul in
let res = EverCrypt.Chacha20Poly1305.aead_decrypt key nonce aad_len aad plain_len tmp_msg' cipher tag in
if res = 0ul
then
TestLib.compare_and_print !$"chacha20poly1305 plain" plain tmp_msg' plain_len
else
failwith "Failure: chacha20poly1305 aead_decrypt returned nonzero value";
pop_frame ()
end
#pop-options | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Prims.unit",
"Test.NoHeap.test_many",
"Test.Vectors.Chacha20Poly1305.vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_chacha20poly1305",
"Test.Lowstarize.LB",
"Test.Vectors.Chacha20Poly1305.vectors_len",
"Test.Vectors.Chacha20Poly1305.vectors",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_True"
] | [] | false | true | false | false | false | let test_chacha20poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
| test_many !$"chacha20poly1305"
test_one_chacha20poly1305
Test.Vectors.Chacha20Poly1305.(LB vectors_len vectors) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_insert_pre_nst | val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool | val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool | let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 100,
"end_line": 927,
"start_col": 0,
"start_line": 927
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
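// Editorial worked instance (added for illustration; not part of the original
// source): if the vector's representation is [a; b; c] and i = 1ul, the lemma
// states that S.upd [a; b; c] 1 v equals [a] appended to (v :: [c]), i.e. the
// updated sequence splits into the prefix before i, the new element, and the
// suffix after i.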
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
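// Editorial example (added for illustration; not from the original source):
// if hs.[lv] currently represents [h0; h1] and `v` is h2, then after the call
// hs.[lv] represents [h0; h1; h2] (an S.snoc, per the postcondition below),
// while every other level of `hs` is left unchanged.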
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
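// Editorial worked instance (added for illustration; not from the original
// source): for an odd index such as j = 5ul, (j + 1ul) / 2ul = 3ul = j / 2ul + 1ul,
// so completing an odd position adds exactly one slot at the next level; this is
// why `insert_` below combines the element just before the new one with `acc`
// before recursing.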
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// lv    BEFORE INSERTION               AFTER INSERTION
// 0     h0   h1   h2         ====>     h0   h1   h2   h3
// 1     h01                            h01  h23
// 2                                    h03
//
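// Editorial trace (added for illustration; not part of the original verified
// source): starting from the picture above with acc = h3 and j = 3:
// lv = 0 (j = 3, odd) : hs[0] := [h0; h1; h2; h3]; acc := hash h2 h3   (= h23)
// lv = 1 (j = 1, odd) : hs[1] := [h01; h23];       acc := hash h01 h23 (= h03)
// lv = 2 (j = 0, even): hs[2] := [h03];            recursion stops
// which yields exactly the AFTER INSERTION column above.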
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtv: MerkleTree.Low.merkle_tree -> v: MerkleTree.Low.Datastructures.hash -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.op_AmpAmp",
"MerkleTree.Low.mt_not_full_nst",
"MerkleTree.Low.add64_fits",
"MerkleTree.Low.__proj__MT__item__offset",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__j",
"FStar.UInt32.__uint_to_t",
"Prims.bool"
] | [] | false | false | false | false | false | let mt_insert_pre_nst mtv v =
| mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul) | false |
Test.NoHeap.fst | Test.NoHeap.test_curve25519 | val test_curve25519 (_: unit) : Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_curve25519 (_: unit) : Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_curve25519 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"curve25519" test_one_curve25519 Test.Vectors.Curve25519.(LB vectors_len vectors) | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 95,
"end_line": 270,
"start_col": 0,
"start_line": 269
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end
let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20
/// Using generated vectors in the vectors/ directory
/// =================================================
/// Poly1305
/// --------
let test_one_poly1305 (v: Test.Vectors.Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not (4294967295ul `U32.sub` 16ul `U32.gte` input_len)
then
failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else begin
B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then
EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then
TestLib.compare_and_print !$"Poly1305" tag dst 16ul
end;
pop_frame ()
let test_poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"poly1305" test_one_poly1305 Test.Vectors.Poly1305.(LB vectors_len vectors)
/// Curve25519
/// ----------
let test_one_curve25519 (v: Test.Vectors.Curve25519.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Curve25519 in
let Vector result result_len public public_len private_ private__len valid = v in
push_frame ();
B.recall result;
B.recall public;
B.recall private_;
let h0 = get () in
let dst = B.alloca 0uy 32ul in
let h1 = get () in
B.recall result;
B.recall public;
B.recall private_;
if public_len = 32ul && private__len = 32ul then
EverCrypt.Curve25519.scalarmult dst private_ public;
B.recall result;
if result_len = 32ul && valid then
TestLib.compare_and_print !$"Curve25519" result dst 32ul;
pop_frame () | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Prims.unit",
"Test.NoHeap.test_many",
"Test.Vectors.Curve25519.vector",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_one_curve25519",
"Test.Lowstarize.LB",
"Test.Vectors.Curve25519.vectors_len",
"Test.Vectors.Curve25519.vectors",
"FStar.Monotonic.HyperStack.mem",
"Prims.l_True"
] | [] | false | true | false | false | false | let test_curve25519 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
| test_many !$"curve25519" test_one_curve25519 Test.Vectors.Curve25519.(LB vectors_len vectors) | false |
MerkleTree.Low.fst | MerkleTree.Low.merkle_tree_lift | val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r}) | val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r}) | let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv)) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 38,
"end_line": 293,
"start_col": 0,
"start_line": 284
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of the lack of 64-bit LowStar.Buffer support, the types below
// currently cannot be changed to 64-bit variants.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
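// Editorial example (added for illustration; not from the original source):
// for a tree with i = 0ul and j = 5ul, `mt_safe_elts` below forces
// |hs[0]| = 5, |hs[1]| = 2, |hs[2]| = 1 and all higher levels to be empty,
// while `rhs` and `mroot` only carry meaningful values once a root
// computation has set `rhs_ok`.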
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
mtv:
MerkleTree.Low.merkle_tree
{ LowStar.RVector.rv_inv h (MT?.hs mtv) /\ LowStar.RVector.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
MerkleTree.Low.mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) }
-> Prims.GTot (r: MerkleTree.New.High.merkle_tree{MerkleTree.New.High.mt_wf_elts r}) | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.merkle_tree",
"Prims.l_and",
"LowStar.RVector.rv_inv",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__rhs",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.__proj__MT__item__mroot",
"MerkleTree.Low.mt_safe_elts",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.New.High.MT",
"FStar.UInt32.v",
"LowStar.RVector.as_seq",
"MerkleTree.Low.__proj__MT__item__rhs_ok",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"FStar.Ghost.reveal",
"MerkleTree.Spec.hash_fun_t",
"MerkleTree.Low.__proj__MT__item__hash_spec",
"Prims.unit",
"MerkleTree.Low.mt_safe_elts_spec",
"MerkleTree.New.High.merkle_tree",
"MerkleTree.New.High.mt_wf_elts"
] | [] | false | false | false | false | false | let merkle_tree_lift h mtv =
| mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv)) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_insert_pre | val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True)) | val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True)) | let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 24,
"end_line": 935,
"start_col": 0,
"start_line": 932
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
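// A small illustration of the offset arithmetic above (the concrete numbers
// are only an example): with tree = 1000UL and index = 1005UL we have
// `offsets_connect 1000UL 1005UL`, since 1005 >= 1000 and the difference 5
// fits in the 32-bit range, so `split_offset 1000UL 1005UL` yields the local
// index 5ul, and `join_offset 1000UL 5ul` maps it back to 1005UL.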
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
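// For illustration, `offset_of` maps an index to the first index of the hash
// pair it belongs to: offset_of 0ul = 0ul, offset_of 6ul = 6ul, and
// offset_of 7ul = 6ul.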
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
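// As a concrete instance (matching the three-leaf example used for `insert_`
// further below): for i = 0ul and j = 3ul, unfolding the definition gives
// V.size_of hs[0] == 3, V.size_of hs[1] == 1, and V.size_of hs[lv] == 0 for
// every lv >= 2, since the recursion continues with i / 2ul and j / 2ul until
// `merkle_tree_size_lg` is reached.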
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
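// A sketch of the region layout this invariant describes (it is how
// `create_empty_mt` below actually allocates things): the tree pointer lives
// directly in some region r, and `hs`, `rhs` and `mroot` each live in their
// own fresh sub-region of r, pairwise disjoint:
//
//   r (B.frameOf mt)
//   |- hs_region    (V.frameOf hs)
//   |- rhs_region   (V.frameOf rhs)
//   |- mroot_region (B.frameOf mroot)
//
// The `HH.extends` / `HH.disjoint` clauses above state exactly this shape.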
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
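// For example (values only for illustration): with lv = 0ul, i = 0ul and
// j = 2ul, the precondition requires hs[0] to hold exactly two hashes, say
// [h0; h1]; after `hash_vv_insert_copy` that level reads [h0; h1; v] and has
// size j + 1ul - offset_of i = 3ul, while the other levels are untouched.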
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION          AFTER INSERTION
// lv
// 0   h0 h1 h2          ====>   h0 h1 h2 h3
// 1   h01                       h01 h23
// 2                             h03
//
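// Step by step, the code below realizes this example as follows (inserting h3
// with i = 0, j = 3, and the accumulator `acc` initially holding h3):
// - lv 0: copy-insert acc, so hs[0] = h0 h1 h2 h3; j = 3 is odd, so
//         acc := hash h2 acc (= h23) and we recurse with j / 2 = 1.
// - lv 1: copy-insert acc, so hs[1] = h01 h23; j = 1 is odd, so
//         acc := hash h01 acc (= h03) and we recurse with j / 2 = 0.
// - lv 2: copy-insert acc, so hs[2] = h03; j = 0 is even, so the recursion
//         stops, matching the "AFTER INSERTION" column above.
//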
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
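// In other words, an insertion is allowed as long as the 32-bit leaf store is
// not full (j < uint32_32_max) and bumping j by one still keeps offset + j
// within the 64-bit range; e.g. offset = 0UL, j = 100ul passes both checks,
// while a tree with j = uint32_32_max is rejected.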
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mt: MerkleTree.Low.const_mt_p -> v: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_insert_pre_nst",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast"
] | [] | false | true | false | false | false | let mt_insert_pre #hsz mt v =
| let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_get_root_pre_nst | val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool | val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool | let mt_get_root_pre_nst mtv rt = true | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 37,
"end_line": 1514,
"start_col": 0,
"start_line": 1514
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
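// (Arithmetic behind the two index helpers above: for an even j, e.g. j = 4,
// j / 2 = (j + 1) / 2 = 2, so the next level keeps the same width; for an odd
// j, e.g. j = 5, (j + 1) / 2 = 3 = j / 2 + 1, so exactly one extra hash is
// pushed up to the next level.)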
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
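// Tracing the recursion for this example: at lv = 0 (j = 3, odd) the new leaf
// h3 is appended and the accumulator becomes h23 = hash h2 h3; at lv = 1
// (j = 1, odd) h23 is appended next to h01 and the accumulator becomes
// h03 = hash h01 h23; at lv = 2 (j = 0, even) h03 is appended and the
// recursion stops.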
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
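// Insertion is allowed only when the tree is not full (MT?.j < 2^32 - 1) and
// adding j + 1 to the 64-bit offset does not overflow (see `add64_fits`).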
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// insertion.
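// After a successful insertion, `MT?.j` grows by one and `rhs_ok` is reset to
// false, so the rightmost hashes and the Merkle root must be reconstructed
// (see `construct_rhs`) before they are used again.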
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
           false // `rhs` is always stale (out of date) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` creates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
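// Accordingly, `mt_create_custom` below first builds an empty tree and then
// inserts `init`, so the resulting tree has i = 0 and j = 1.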
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
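// Concretely, `path_safe` below requires every hash in a path to live in a
// region included in the tree region `mtr`, while the path structure itself
// lives in a region disjoint from `mtr`.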
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
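// `lift_path_ h hs i j` reads the hashes hs[i..j-1] back from memory `h` into
// a pure sequence, preserving their order (the recursion snocs the last
// element at each step).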
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
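// For example, for the three-leaf tree used in the insertion example above
// (hs[0] = [h0;h1;h2], hs[1] = [h01], i = 0, j = 3): at lv = 0, j is odd and
// actd is false, so acc is set to h2; at lv = 1, j = 1 is odd and actd is
// true, so rhs[1] is set to h2 and acc becomes hash h01 h2; the recursion then
// reaches j = 0 and stops, leaving hash h01 h2 as the final accumulator, i.e.
// the Merkle root.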
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtv: MerkleTree.Low.merkle_tree -> rt: MerkleTree.Low.Datastructures.hash -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.bool"
] | [] | false | false | false | false | false | let mt_get_root_pre_nst mtv rt =
| true | false |
MerkleTree.Low.fst | MerkleTree.Low.as_seq_sub_upd | val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)))))) | val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)))))) | let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 70,
"end_line": 419,
"start_col": 0,
"start_line": 408
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)))))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
rv: LowStar.RVector.rvector rg ->
i: LowStar.Vector.uint32_t{i < LowStar.Vector.size_of rv} ->
v: Rgl?.repr rg
-> FStar.Pervasives.Lemma (requires LowStar.RVector.rv_inv h rv)
(ensures
FStar.Seq.Base.equal (FStar.Seq.Base.upd (LowStar.RVector.as_seq h rv) (FStar.UInt32.v i) v)
(FStar.Seq.Base.append (LowStar.RVector.as_seq_sub h rv 0ul i)
(FStar.Seq.Base.cons v
(LowStar.RVector.as_seq_sub h rv (i + 1ul) (LowStar.Vector.size_of rv))))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"LowStar.Regional.regional",
"FStar.Monotonic.HyperStack.mem",
"LowStar.RVector.rvector",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowStar.Vector.size_of",
"LowStar.Regional.__proj__Rgl__item__repr",
"Prims._assert",
"Prims.eq2",
"FStar.Seq.Base.index",
"FStar.Seq.Base.upd",
"LowStar.RVector.as_seq",
"FStar.UInt32.v",
"Prims.unit",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"FStar.Integers.op_Plus",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"LowStar.RVector.as_seq_sub",
"FStar.UInt32.__uint_to_t",
"LowStar.RVector.as_seq_seq_slice",
"LowStar.Vector.as_seq",
"FStar.Seq.Properties.slice_upd"
] | [] | true | false | true | false | false | let as_seq_sub_upd #a #rst #rg h rv i v =
| Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv) 0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i)) (RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg
h
(V.as_seq h rv)
0
(U32.v (V.size_of rv))
(U32.v i + 1)
(U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v) | false |
MerkleTree.Low.fst | MerkleTree.Low.phashes | val phashes (h: HS.mem) (p: path_p) : GTot (V.vector (hash #(Path?.hash_size (B.get h p 0)))) | val phashes (h: HS.mem) (p: path_p) : GTot (V.vector (hash #(Path?.hash_size (B.get h p 0)))) | let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 28,
"end_line": 1062,
"start_col": 0,
"start_line": 1060
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
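// Illustrative example (not part of the original development): after inserting
// three leaves h0, h1, h2 into a fresh tree we have i = 0, j = 3,
// hs[0] = [h0; h1; h2], hs[1] = [h01], every higher level empty, and
// rhs_ok = false until the rightmost hashes and the root are recomputed into
// `rhs` and `mroot`.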
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
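// (Note: uint32_32_max = 4294967295ul is exactly 2^32 - 1; `mt_not_full_nst`
// below checks MT?.j < 2^32 - 1, so an insertion is allowed only while the new
// element count still fits in a 32-bit index.)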
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
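// Worked instance (illustrative only): with i = 3 and j = 5 at level `lv`,
// `offset_of i` is 2, so the definition below requires hs[lv] to hold exactly
// j - 2 = 3 hashes, and recursively requires the analogous shape at level
// lv + 1 for the halved indices i / 2 = 1 and j / 2 = 2.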
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
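// A sketch of the state produced by `create_empty_mt` below (informal, for
// orientation only): offset = 0, i = j = 0, every level of `hs` is an empty
// vector, `rhs_ok` is false, and `rhs`/`mroot` hold freshly allocated initial
// hashes; this is exactly the lifting of `MTH.create_empty_mt`.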
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying the
// hash and pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
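// Concretely (an illustrative reading of the postcondition below): if hs[lv]
// was [h0; h1] before the call, then afterwards hs[lv] is [h0; h1; copy-of-v]
// (the `S.snoc` clause), the other levels are untouched, and `v` itself is
// still live and unchanged, so it can keep serving as the accumulator.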
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//        BEFORE INSERTION                AFTER INSERTION
// lv
// 0      h0  h1  h2          ====>       h0  h1  h2  h3
// 1      h01                             h01 h23
// 2                                      h03
//
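// A minimal sketch of the corresponding high-level recursion (paraphrasing
// MTH.insert_ from MerkleTree.New.High; the exact shape is illustrative, not a
// verbatim quote of that module):
//
//   let rec insert_ lv i j hs acc =
//     let ihs = hashess_insert lv i j hs acc in            // snoc acc onto hs[lv]
//     if j % 2 = 1                                         // lv just completed a pair:
//     then insert_ (lv + 1) (i / 2) (j / 2) ihs            // compress the pair into acc
//                  (hash_fun (S.last (S.index hs lv)) acc) // and recurse one level up
//     else ihs
//
// The low-level `insert_` below follows the same recursion while additionally
// threading `modifies` clauses and `mt_safe_elts` through every step.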
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// mutates the contents of `v`, since it uses `v` as an accumulator during
// insertion.
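// Usage note (illustrative; not part of this module): because `v` doubles as
// the accumulator, a caller that still needs the original leaf value after
// insertion must first copy it into a buffer it owns (e.g. with the `hcpy hsz`
// copier used elsewhere in this file), and only then call
//
//   mt_insert hsz mt v
//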
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
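// Construction therefore composes the two operations above; the body of
// `mt_create_custom` below is essentially
//
//   let mt = create_empty_mt hsz hash_spec hash_fun r in
//   mt_insert hsz mt init;
//   mt
//
// which lifts to `MTH.mt_create` on the specification side.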
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
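// In other words, a `path` only borrows hash pointers owned by the tree's
// regions: `path_safe` (defined further below) requires every stored pointer to
// live inside the given tree region `mtr`, while the path's own vector lives in
// a region disjoint from `mtr`.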
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> p: MerkleTree.Low.path_p
-> Prims.GTot (LowStar.Vector.vector MerkleTree.Low.Datastructures.hash) | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.path_p",
"MerkleTree.Low.__proj__Path__item__hashes",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.vector",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size"
] | [] | false | false | false | false | false | let phashes (h: HS.mem) (p: path_p) : GTot (V.vector (hash #(Path?.hash_size (B.get h p 0)))) =
| Path?.hashes (B.get h p 0) | false |
MerkleTree.Low.fst | MerkleTree.Low.path_safe | val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0 | val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0 | let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p)) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 32,
"end_line": 1077,
"start_col": 0,
"start_line": 1068
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
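// Note that this deliberately over-approximates the footprint: `mt_loc` below covers
// every region rooted at `B.frameOf mt`, which is sound because `mt_safe` forces each
// component region to extend `B.frameOf mt`.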
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
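// A hypothetical illustration (indices not taken from this file): if level `lv`
// currently stores the two hashes for indices 4 and 5 (so i = 4, j = 6), then after
// the call it stores three hashes (the old two plus a fresh copy of `v`), while all
// other levels of `hs` are left untouched.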
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION         AFTER INSERTION
// lv
// 0   h0 h1 h2          ====>  h0 h1 h2 h3
// 1   h01                      h01 h23
// 2                            h03
//
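// Walking through the picture above (a hedged reading of the code below): inserting
// h3 starts at lv = 0 with j = 3 (odd), so h3 is pushed onto hs[0] and the accumulator
// becomes hash(h2, h3) = h23; the recursive call at lv = 1 sees j = 1 (odd again),
// pushes h23 onto hs[1] and computes hash(h01, h23) = h03; the final call at lv = 2
// sees j = 0 (even), pushes h03 onto hs[2] and stops.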
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
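// Put differently, after `mt_insert` the caller must not assume `v` still holds the
// inserted leaf hash: every odd level reached by `insert_` overwrites `v` with the
// next compressed hash, so a caller that needs the original value should keep a copy.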
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
         false // `rhs` always becomes stale (no longer valid) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
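// A hedged usage sketch (hypothetical client code, not part of this file): a caller
// would typically allocate an eternal region `r`, allocate and fill an `init` hash in
// a region disjoint from `r` (e.g. via `rg_alloc (hreg hsz)`), call
// `mt_create_custom hsz hash_spec r init hash_fun`, then repeatedly `mt_insert`
// further leaf hashes (each living in a region disjoint from the tree's), and
// finally release the tree with `mt_free`.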
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually define
// the invariants and representation here.
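// Concretely, the hashes stored in a `path` point into the tree's own regions
// (`mtr` in `path_safe` below), so the path never owns its elements; `path_safe`
// therefore spells out liveness and region inclusion for every element instead of
// reusing the `regional` machinery.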
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe: | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | h: FStar.Monotonic.HyperStack.mem -> mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.path_p
-> Prims.GTot Type0 | Prims.GTot | [
"sometrivial"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.Vector.live",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.phashes",
"LowStar.Vector.freeable",
"FStar.HyperStack.ST.is_eternal_region",
"LowStar.Vector.frameOf",
"LowStar.Vector.forall_all",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hreg",
"Prims.b2t",
"FStar.Monotonic.HyperHeap.includes",
"LowStar.Regional.__proj__Rgl__item__region_of",
"FStar.Monotonic.HyperHeap.extends",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Monotonic.HyperHeap.disjoint"
] | [] | false | false | false | false | true | let path_safe h mtr p =
| B.live h p /\ B.freeable p /\ V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h
(phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\ HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\ HH.disjoint mtr (B.frameOf p)) | false |
MerkleTree.Low.fst | MerkleTree.Low.insert_modifies_rec_helper | val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc) | val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc) | let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 54,
"end_line": 659,
"start_col": 0,
"start_line": 624
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we cannot
// currently change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
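// A consequence of this design: every insertion clears `rhs_ok` (see `mt_insert`),
// and `rhs` / `mroot` are only recomputed lazily, when a Merkle path or the root is
// actually requested.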
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
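// For example (numbers chosen purely for illustration): with i = 5 and j = 7 we have
// offset_of i = 4, so the vector at level `lv` is expected to hold j - 4 = 3 hashes;
// the left sibling at index 4 is kept alongside the accessible elements at 5 and 6.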
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
lv: LowStar.Vector.uint32_t{lv < MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
aloc: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union
(LowStar.RVector.rs_loc_elem (MerkleTree.Low.Datastructures.hvreg hsz)
(LowStar.Vector.as_seq h hs)
(FStar.UInt32.v lv))
(LowStar.Vector.loc_vector_within hs lv (lv + 1ul)))
aloc)
(LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rv_loc_elems
h
hs
(lv + 1ul)
(LowStar.Vector.size_of hs))
(LowStar.Vector.loc_vector_within hs (lv + 1ul) (LowStar.Vector.size_of hs)))
aloc) ==
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rv_loc_elems
h
hs
lv
(LowStar.Vector.size_of hs))
(LowStar.Vector.loc_vector_within hs lv (LowStar.Vector.size_of hs)))
aloc) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.loc_union_assoc_4",
"LowStar.RVector.rs_loc_elem",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.Vector.as_seq",
"FStar.UInt32.v",
"LowStar.Vector.loc_vector_within",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"LowStar.RVector.rv_loc_elems",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_union_assoc",
"LowStar.Monotonic.Buffer.loc_union",
"Prims._assert",
"Prims.eq2",
"LowStar.RVector.rs_loc_elems_rec_inverse"
] | [] | true | false | true | false | false | let insert_modifies_rec_helper #hsz lv hs aloc h =
| assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc
(loc_union (loc_union (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc (loc_union (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc
aloc;
loc_union_assoc (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)) | false |
Test.NoHeap.fst | Test.NoHeap.test_one_hmac | val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 7,
"end_line": 127,
"start_col": 0,
"start_line": 104
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | vec: Test.NoHeap.hmac_vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.NoHeap.hmac_vector",
"Spec.Hash.Definitions.hash_alg",
"FStar.UInt32.t",
"LowStar.Buffer.buffer",
"FStar.UInt8.t",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.len",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.recallable",
"Spec.Hash.Definitions.is_shake",
"Test.NoHeap.failwith",
"Prims.unit",
"Prims.bool",
"Prims.op_disEquality",
"Hacl.Hash.Definitions.hash_len",
"Prims.op_Negation",
"Test.NoHeap.keysized",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"FStar.Integers.op_Subtraction",
"FStar.UInt32.__uint_to_t",
"Hacl.Hash.Definitions.block_len",
"EverCrypt.HMAC.is_supported_alg",
"FStar.HyperStack.ST.pop_frame",
"TestLib.compare_and_print",
"LowStar.Monotonic.Buffer.recall",
"C.String.t",
"Test.NoHeap.string_of_alg",
"EverCrypt.HMAC.compute",
"LowStar.Monotonic.Buffer.mbuffer",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.v",
"Prims.b2t",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Buffer.alloca",
"Lib.IntTypes.uint8",
"FStar.UInt8.__uint_to_t",
"Prims._assert",
"FStar.Integers.op_Less",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Integers.op_Plus",
"FStar.Integers.v",
"Spec.Hash.Definitions.block_length",
"Prims.pow2",
"Spec.Agile.HMAC.keysized",
"FStar.HyperStack.ST.push_frame"
] | [] | false | true | false | false | false | let test_one_hmac vec =
| let ha, LB keylen key, LB datalen data, LB expectedlen expected = vec in
if Spec.Hash.Definitions.is_shake ha
then failwith "unsupported shake algorithm"
else
if expectedlen <> Hacl.Hash.Definitions.hash_len ha
then failwith "Wrong length of expected tag\n"
else
if not (keysized ha keylen)
then failwith "Keysized predicate not satisfied\n"
else
if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha)
then failwith "Datalen predicate not satisfied\n"
else
if EverCrypt.HMAC.is_supported_alg ha
then
(push_frame ();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame ()) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_get_path_step_pre_nst | val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool | val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool | let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 32,
"end_line": 1860,
"start_col": 0,
"start_line": 1859
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because 64-bit LowStar.Buffer support is lacking, the index types
// below currently cannot be changed to wider ones.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
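// Illustrative sketch of the offset arithmetic above (editorial example): with
// a tree offset of 10UL, `offsets_connect 10UL 12UL` holds because 12UL >= 10UL
// and the difference fits in 32 bits; `split_offset 10UL 12UL` then yields the
// local index 2ul, and `join_offset 10UL 2ul` recovers the absolute offset 12UL.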
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: tracks whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
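// Illustrative instance of the record above (editorial example): a tree holding
// three leaves h0, h1, h2 with no retired elements has `i = 0ul`, `j = 3ul`,
// `hs[0]` storing [h0; h1; h2], `hs[1]` storing [h01], and the remaining levels
// empty; `rhs` and `mroot` carry meaningful values only once `rhs_ok` is set.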
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
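// For instance, `offset_of 4ul = 4ul` and `offset_of 5ul = 4ul`: the index is
// rounded down to the nearest even number, which is the first index at a level
// whose hash is still kept in `hs`.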
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
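// Worked instance of the predicate (editorial example): with i = 0ul and
// j = 3ul, level 0 must hold 3 hashes; the recursive call (i = 0ul, j = 1ul)
// requires 1 hash at level 1; and every further level (j = 0ul) must be empty,
// up to level 32.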
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION           AFTER INSERTION
// lv
// 0   h0 h1 h2       ====>   h0 h1 h2 h3
// 1   h01                    h01 h23
// 2                          h03
//
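// Walking through the insertion of h3 above (an editorial sketch of the code
// below): at lv 0 the copy of h3 is pushed and, since j = 3 is odd, the
// accumulator becomes hash(h2, h3) = h23 before recursing with j = 1; at lv 1
// the copy of h23 is pushed and, since j = 1 is odd, the accumulator becomes
// hash(h01, h23) = h03 before recursing with j = 0; at lv 2 (j = 0, even) the
// copy of h03 is pushed and the recursion stops.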
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// mutates the contents of `v`, since `v` is used as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
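// Informally, `lift_path_ h hs i j` is the sequence of high-level hashes
// [r_repr hs.[i]; ...; r_repr hs.[j-1]], built left to right with `S.snoc`.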
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
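// For example, with i = 0, j = 5 and actd = false: level 0 has odd j, so `acc`
// becomes a copy of hs[0].[4]; level 1 (j = 2) is even and only recurses;
// level 2 (j = 1) is odd with an active accumulator, so rhs.[2] := acc and
// acc := hash hs[2].[0] acc, and this final `acc` is the Merkle root of the
// five leaves.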
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`, and a
// boolean flag (indicating whether rightmost hashes exist), we can calculate
// the required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For a detailed description, see `mt_get_path` and `mt_verify`.
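// For example, with k = 2, j = 5 and actd = false the path length is 3: one
// sibling at level 0 (k = 2 is even and index 3 < j), one at level 1 (k = 1 is
// odd), and one at level 2 (k = 0, j = 1, but a rightmost hash is active by then).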
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.path -> i: LowStar.Vector.uint32_t
-> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path",
"LowStar.Vector.uint32_t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"Prims.bool"
] | [] | false | false | false | true | false | let mt_get_path_step_pre_nst #hsz mtr p i =
| i < V.size_of (Path?.hashes p) | false |
Test.NoHeap.fst | Test.NoHeap.keysized | val keysized (a: H.alg) (l: UInt32.t) : Tot (b: bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l)}) | val keysized (a: H.alg) (l: UInt32.t) : Tot (b: bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l)}) | let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 55,
"end_line": 101,
"start_col": 0,
"start_line": 97
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
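    // At this point `total_input` holds `repeat` back-to-back copies of `input`.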
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ---- | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | a: EverCrypt.Hash.alg -> l: FStar.UInt32.t
-> b: Prims.bool{b ==> Spec.Agile.HMAC.keysized a (FStar.UInt32.v l)} | Prims.Tot | [
"total"
] | [] | [
"EverCrypt.Hash.alg",
"FStar.UInt32.t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"FStar.Integers.op_Subtraction",
"FStar.UInt32.__uint_to_t",
"Hacl.Hash.Definitions.block_len",
"Prims.unit",
"FStar.Pervasives.assert_norm",
"Prims.b2t",
"Prims.op_Equality",
"FStar.Integers.int_t",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Integers.v",
"Prims.pow2",
"Prims._assert",
"Spec.Hash.Definitions.less_than_max_input_length",
"EverCrypt.Hash.uint32_fits_maxLength",
"Prims.bool",
"Prims.l_imp",
"Spec.Agile.HMAC.keysized",
"FStar.UInt32.v"
] | [] | false | false | false | false | false | let keysized (a: H.alg) (l: UInt32.t) : Tot (b: bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l)}) =
| EverCrypt.Hash.uint32_fits_maxLength a l;
assert ((v l) `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a | false |
MerkleTree.Low.fst | MerkleTree.Low.path_safe_preserved_ | val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j) | val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j) | let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 54,
"end_line": 1188,
"start_col": 0,
"start_line": 1181
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
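// For example, offset_of 6ul = 6ul and offset_of 7ul = 6ul: the result is the
// largest even index not exceeding `i`.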
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
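// As a sketch of the procedure below (this only restates the function body, it
// is not an additional specification): `RV.insert_copy` first builds a fresh
// vector `ihv`, which is `hs[lv]` with a copy of `v` appended, and `RV.assign`
// then writes `ihv` back into `hs` at index `lv`; the intermediate lemma calls
// merely re-establish disjointness, `rv_inv` and `mt_safe_elts` across these
// two steps.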
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
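// As a worked instance of the picture above (using the compression convention
// `hij = hash hi hj` stated in the caption): inserting `h3` with j = 3 first
// appends `h3` at level 0; since j is odd, the accumulator becomes
// `hash h2 h3 = h23` and we recurse at level 1 with j/2 = 1, appending `h23`;
// that index is odd again, so the accumulator becomes `hash h01 h23 = h03` and
// the final recursive call (index 0, even case) appends `h03` at level 2,
// matching the right-hand column of the diagram.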
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// the insertion.
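// In outline (mirroring the code below rather than adding new behaviour):
// `mt_insert` runs `insert_` from level 0 with the tree's `i`, `j` and `hs`,
// using `v` as the accumulator, and then stores a record with `j + 1`, the
// same `hs`/`rhs`/`mroot`, and `rhs_ok = false`, since the cached rightmost
// hashes are stale after the insertion.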
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always invalidated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
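// For orientation (this restates the definition below, it is not an extra
// requirement): `mt_create_custom` is `create_empty_mt` followed by a single
// `mt_insert` of `init`, so the resulting tree holds exactly one leaf.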
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// define the invariants and the representation manually here.
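// Concretely (see `path_safe` below), the manual invariant asks that the path
// pointer and its hash vector are live and freeable, that every stored hash is
// valid and lives in a region included in the tree region `mtr`, and that
// `mtr` is disjoint from the path's own region — which is what the regionality
// machinery would otherwise have provided.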
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
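// For example (directly from the recursion above), `lift_path_ h hs 0 2` is
// the two-element sequence
// `[Rgl?.r_repr (hreg hsz) h (S.index hs 0); Rgl?.r_repr (hreg hsz) h (S.index hs 1)]`,
// i.e., hashes are lifted left to right in index order.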
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mtr: FStar.Monotonic.HyperHeap.rid ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat{i <= j && j <= FStar.Seq.Base.length hs} ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_seq hs
i
j
(fun hp ->
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h0 hp /\
FStar.Monotonic.HyperHeap.includes mtr
(Rgl?.region_of (MerkleTree.Low.Datastructures.hreg hsz) hp)) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
LowStar.Vector.forall_seq hs
i
j
(fun hp ->
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h1 hp /\
FStar.Monotonic.HyperHeap.includes mtr
(Rgl?.region_of (MerkleTree.Low.Datastructures.hreg hsz) hp)))
(decreases j) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.op_Equality",
"Prims.bool",
"MerkleTree.Low.path_safe_preserved_",
"FStar.Integers.op_Subtraction",
"Prims.unit",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.index",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Regional.__proj__Rgl__item__region_of"
] | [
"recursion"
] | false | false | true | false | false | let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
| if i = j
then ()
else
(assert (loc_includes (B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1) | false |
MerkleTree.Low.fst | MerkleTree.Low.path_safe_preserved | val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p)) | val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p)) | let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1 | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 54,
"end_line": 1203,
"start_col": 0,
"start_line": 1198
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the index types below to anything wider (e.g. 64-bit indices).
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
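// A small numeric illustration of the offset arithmetic above (plain 64/32-bit
// arithmetic, not an additional definition): with `tree = 10UL` and
// `index = 12UL`, `offsets_connect` holds and `split_offset` yields `2ul`;
// conversely, `join_offset 10UL 2ul` gives back `12UL`, provided `add64_fits`.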
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
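// For instance (just instantiating the definition below): with i = 2ul and
// j = 5ul, level `lv` must hold j - offset_of i = 3 hashes, the recursive
// clause requires level `lv + 1` to hold j/2 - offset_of (i/2) = 2 hashes,
// and so on up to `merkle_tree_size_lg`.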
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// reachable from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hash at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
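//
// A worked trace of the example above (informal; `i` is taken to be 0 with its
// `Ghost` wrapper elided, and `f` stands for `hash_fun`): inserting `h3` starts
// at level 0 with `j = 3ul` and `acc = h3`. `hash_vv_insert_copy` appends `h3`
// to `hs[0]`; since `j` is odd, the accumulator becomes `h23 = f h2 h3` and we
// recurse at level 1 with `j = 1ul`. There `h23` is appended to `hs[1]`, the
// accumulator becomes `h03 = f h01 h23`, and the final recursive call at
// level 2 with `j = 0ul` appends `h03` to `hs[2]` and stops, since `j` is now
// even.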
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// insertion.
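// In particular, the contents of `v` are overwritten in place, so a caller
// that still needs the original hash value afterwards should pass a copy of it.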
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and representation here.
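// Concretely, `path_safe` below requires every hash pointer stored in a path
// to live in a region included in the tree's region `mtr`, while the path
// struct itself lives in a region disjoint from `mtr`; `lift_path` then gives
// the high-level (sequence-of-hashes) representation used by the correctness
// lemmas.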
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h0 mtr p /\
LowStar.Monotonic.Buffer.loc_disjoint dl (MerkleTree.Low.path_loc p) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1) (ensures MerkleTree.Low.path_safe h1 mtr p) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.path_safe_preserved_",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"Prims.unit",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | true | false | true | false | false | let path_safe_preserved mtr p dl h0 h1 =
| assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_ mtr
(V.as_seq h0 (phashes h0 p))
0
(S.length (V.as_seq h0 (phashes h0 p)))
dl
h0
h1 | false |
Test.NoHeap.fst | Test.NoHeap.main | val main: unit -> Stack Int32.t (fun _ -> True) (fun _ _ _ -> True) | val main: unit -> Stack Int32.t (fun _ -> True) (fun _ _ _ -> True) | let main () =
let open Test.Vectors in
C.String.print !$"Start WASM tests\n";
test_hash hash_vectors_low;
test_hmac hmac_vectors_low;
test_hkdf hkdf_vectors_low;
test_chacha20 chacha20_vectors_low;
test_poly1305 ();
test_curve25519 ();
test_chacha20poly1305 ();
C.String.print !$"End WASM tests\n";
0l | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 4,
"end_line": 332,
"start_col": 0,
"start_line": 321
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end
let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20
/// Using generated vectors in the vectors/ directory
/// =================================================
/// Poly1305
/// --------
let test_one_poly1305 (v: Test.Vectors.Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not (4294967295ul `U32.sub` 16ul `U32.gte` input_len)
then
failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else begin
B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then
EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then
TestLib.compare_and_print !$"Poly1305" tag dst 16ul
end;
pop_frame ()
let test_poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"poly1305" test_one_poly1305 Test.Vectors.Poly1305.(LB vectors_len vectors)
/// Curve25519
/// ----------
let test_one_curve25519 (v: Test.Vectors.Curve25519.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Curve25519 in
let Vector result result_len public public_len private_ private__len valid = v in
push_frame ();
B.recall result;
B.recall public;
B.recall private_;
let h0 = get () in
let dst = B.alloca 0uy 32ul in
let h1 = get () in
B.recall result;
B.recall public;
B.recall private_;
if public_len = 32ul && private__len = 32ul then
EverCrypt.Curve25519.scalarmult dst private_ public;
B.recall result;
if result_len = 32ul && valid then
TestLib.compare_and_print !$"Curve25519" result dst 32ul;
pop_frame ()
let test_curve25519 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"curve25519" test_one_curve25519 Test.Vectors.Curve25519.(LB vectors_len vectors)
/// Chacha20-Poly1305
/// -----------------
#push-options "--z3rlimit 32"
let test_one_chacha20poly1305 (v: Test.Vectors.Chacha20Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let Test.Vectors.Chacha20Poly1305.Vector cipher_and_tag cipher_and_tag_len plain plain_len aad aad_len nonce nonce_len key key_len = v in
if not (key_len = 32ul)
then failwith "chacha20poly1305: not (key_len = 32ul)"
else if not (nonce_len = 12ul)
then failwith "chacha20poly1305: not (nonce_len = 12ul)"
else if not ((4294967295ul `U32.sub` 16ul) `U32.gte` plain_len)
then failwith "chacha20poly1305: not ((4294967295ul `U32.sub` 16ul) `U32.gte` plain_len)"
else if not ((plain_len `U32.div` 64ul) `U32.lte` (4294967295ul `U32.sub` aad_len))
then failwith "chacha20poly1305: not ((plain_len `U32.div` 64ul) `U32.lte` (4294967295ul `U32.sub` aad_len))"
else if not (cipher_and_tag_len = plain_len `U32.add` 16ul)
then failwith "chacha20poly1305: not (cipher_and_tag_len = plain_len `U32.add` 16ul)"
else begin
B.recall plain;
B.recall cipher_and_tag;
B.recall aad;
B.recall nonce;
B.recall key;
push_frame ();
let tmp = B.alloca 0uy (plain_len `U32.add` 16ul) in
let tmp_msg' = B.sub tmp 0ul plain_len in
let tag' = B.sub tmp plain_len 16ul in
EverCrypt.Chacha20Poly1305.aead_encrypt key nonce aad_len aad plain_len plain tmp_msg' tag';
TestLib.compare_and_print !$"chacha20poly1305 cipher and tag" cipher_and_tag tmp cipher_and_tag_len;
let cipher = B.sub cipher_and_tag 0ul plain_len in
let tag = B.sub cipher_and_tag plain_len 16ul in
let res = EverCrypt.Chacha20Poly1305.aead_decrypt key nonce aad_len aad plain_len tmp_msg' cipher tag in
if res = 0ul
then
TestLib.compare_and_print !$"chacha20poly1305 plain" plain tmp_msg' plain_len
else
failwith "Failure: chacha20poly1305 aead_decrypt returned nonzero value";
pop_frame ()
end
#pop-options
let test_chacha20poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"chacha20poly1305" test_one_chacha20poly1305 Test.Vectors.Chacha20Poly1305.(LB vectors_len vectors)
/// A main for WASM tests only (ignored by Test)
/// ============================================ | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | _: Prims.unit -> FStar.HyperStack.ST.Stack FStar.Int32.t | FStar.HyperStack.ST.Stack | [] | [] | [
"Prims.unit",
"FStar.Int32.__int_to_t",
"FStar.Int32.t",
"C.String.print",
"Test.NoHeap.op_Bang_Dollar",
"Test.NoHeap.test_chacha20poly1305",
"Test.NoHeap.test_curve25519",
"Test.NoHeap.test_poly1305",
"Test.NoHeap.test_chacha20",
"Test.Vectors.chacha20_vectors_low",
"Test.NoHeap.test_hkdf",
"Test.Vectors.hkdf_vectors_low",
"Test.NoHeap.test_hmac",
"Test.Vectors.hmac_vectors_low",
"Test.NoHeap.test_hash",
"Test.Vectors.hash_vectors_low"
] | [] | false | true | false | false | false | let main () =
| let open Test.Vectors in
C.String.print !$"Start WASM tests\n";
test_hash hash_vectors_low;
test_hmac hmac_vectors_low;
test_hkdf hkdf_vectors_low;
test_chacha20 chacha20_vectors_low;
test_poly1305 ();
test_curve25519 ();
test_chacha20poly1305 ();
C.String.print !$"End WASM tests\n";
0l | false |
Test.NoHeap.fst | Test.NoHeap.test_one_poly1305 | val test_one_poly1305 (v: Test.Vectors.Poly1305.vector)
: Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_one_poly1305 (v: Test.Vectors.Poly1305.vector)
: Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_one_poly1305 (v: Test.Vectors.Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not (4294967295ul `U32.sub` 16ul `U32.gte` input_len)
then
failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else begin
B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then
EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then
TestLib.compare_and_print !$"Poly1305" tag dst 16ul
end;
pop_frame () | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 241,
"start_col": 0,
"start_line": 218
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end
let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20
/// Using generated vectors in the vectors/ directory
/// =================================================
/// Poly1305
/// -------- | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: Test.Vectors.Poly1305.vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.Vectors.Poly1305.vector",
"LowStar.Buffer.buffer",
"FStar.UInt8.t",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Buffer.trivial_preorder",
"FStar.UInt32.t",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.v",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Prims.op_Negation",
"FStar.UInt32.gte",
"FStar.UInt32.sub",
"FStar.UInt32.__uint_to_t",
"Test.NoHeap.failwith",
"Prims.bool",
"TestLib.compare_and_print",
"Test.NoHeap.op_Bang_Dollar",
"LowStar.Monotonic.Buffer.recall",
"EverCrypt.Poly1305.mac",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"FStar.UInt32.uint_to_t",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Buffer.alloca",
"FStar.UInt8.__uint_to_t",
"FStar.HyperStack.ST.push_frame",
"Prims.l_True"
] | [] | false | true | false | false | false | let test_one_poly1305 (v: Test.Vectors.Poly1305.vector)
: Stack unit (fun _ -> True) (fun _ _ _ -> True) =
| let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not ((4294967295ul `U32.sub` 16ul) `U32.gte` input_len)
then failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else
(B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then TestLib.compare_and_print !$"Poly1305" tag dst 16ul);
pop_frame () | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_flush_to_pre_nst | val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool | val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool | let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 19,
"end_line": 2423,
"start_col": 0,
"start_line": 2419
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below (e.g. to 64-bit indices).
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: indicates whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate Merkle paths that need the rightmost hashes as part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
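//
// For example (an informal illustration): a tree holding three leaves with
// `offset = 0UL`, `i = 0ul` and `j = 3ul` has `hs[0] = [h0; h1; h2]` and
// `hs[1] = [h01]`, where `h01` is the compressed hash of `h0` and `h1`;
// `rhs` and `mroot` only hold meaningful values once `rhs_ok` is true.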
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
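// For example, `offset_of 6ul = 6ul` and `offset_of 7ul = 6ul`: `offset_of`
// rounds an index down to the nearest even number, which (cf. `mt_safe_elts`
// below) is the index of the first hash actually kept at a level.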
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
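// Worked instance (values chosen for illustration): `mt_safe_elts h 0ul hs 2ul 5ul`
// unfolds to
//   V.size_of hs[0] == 5 - offset_of 2 = 3   (leaf hashes for indices 2..4)
//   V.size_of hs[1] == 2 - offset_of 1 = 2   (recursive call with i=1, j=2)
//   V.size_of hs[2] == 1 - offset_of 0 = 1   (recursive call with i=0, j=1)
//   V.size_of hs[lv] == 0 for lv >= 3        (recursive calls with i=0, j=0)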
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (between the constituent data structures),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is fine to take all regions
// reachable from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// and pushing its content onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
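// For instance (hash names chosen here for exposition): if `hs[lv]` currently
// holds [h0 h1] with i = 0 and j = 2, then after pushing a new hash `v` at that
// level it holds [h0 h1 v], and its size is j + 1 - offset_of i = 3, matching
// the postcondition below.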
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
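// Tracing the recursion for the example above when `h3` is inserted (i = 0, j = 3):
//   lv 0: hs[0] becomes [h0 h1 h2 h3]; j = 3 is odd, so acc := hash(h2, h3) = h23
//   lv 1: hs[1] becomes [h01 h23];     j = 1 is odd, so acc := hash(h01, h23) = h03
//   lv 2: hs[2] becomes [h03];         j = 0 is even, so the recursion stops.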
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always invalidated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually
// define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
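// Illustrative run (hash names chosen here) on a tree with 3 leaves
// (i = 0, j = 3, actd = false initially):
//   lv 0: j = 3 is odd and actd is false, so acc := hs[0][2] (= h2);
//         recurse with actd = true
//   lv 1: j = 1 is odd and actd is true, so rhs[1] := acc (= h2) and
//         acc := hash(hs[1][0], acc) = hash(h01, h2); recurse
//   lv 2: j = 0, so the recursion ends; the Merkle root is the final acc,
//         i.e. hash(h01, h2).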
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
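// For example, calling `mt_get_root` twice in a row with no insertion in
// between performs the `construct_rhs` work only once: the first call sets
// `rhs_ok` to true and stores the root in `mroot`, so the second call simply
// copies `mroot` into `rt`.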
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
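// Illustrative trace (not part of the original source): with k = 2ul, j = 5ul
// and actd = false, `mt_path_length 0ul 2ul 5ul false = 3ul`:
// - lv 0: k is even and j <> k, j <> k + 1ul, so the sibling at index 3 is needed (step 1);
//         j is odd, so the recursive call gets actd = true;
// - lv 1: k = 1ul is odd, so one hash is needed (step 1);
// - lv 2: k = 0ul and j = k + 1ul, but actd is true, so the rightmost hash is needed (step 1);
// - lv 3: j = 0ul and the recursion stops.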
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
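// (Editorial note, not part of the original source: the resulting path stores
// pointers into the tree's regions, cf. the `HH.includes mtr` requirements on
// `mt_path_insert` above, so the tree must stay live while the path is in use.)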
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always retain at least one base hash
// element. If there are `MT?.j` elements in the tree, then because of the
// precondition `MT?.i <= idx < MT?.j` we still have the `idx`-th element after
// flushing.
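// Illustrative example (not part of the original source): with `MT?.i = 0ul`
// and `idx = 5ul`, `mt_flush_to_` drops `offset_of 5ul - offset_of 0ul = 4ul`
// leading hashes at level 0 (leaf indices 0..3), then recurses at level 1 with
// `i = 2ul`, dropping two internal hashes there; at level 2 the offsets
// coincide and the recursion stops.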
private inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtv: MerkleTree.Low.merkle_tree -> idx: MerkleTree.Low.offset_t -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.offset_t",
"Prims.op_AmpAmp",
"MerkleTree.Low.offsets_connect",
"MerkleTree.Low.__proj__MT__item__offset",
"FStar.Integers.op_Greater_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Less",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"Prims.bool"
] | [] | false | false | false | true | false | let mt_flush_to_pre_nst mtv idx =
| offsets_connect (MT?.offset mtv) idx &&
([@@ inline_let ]let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv && idx < MT?.j mtv) | false |
MerkleTree.Low.fst | MerkleTree.Low.clear_path | val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty)) | val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty)) | let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv)) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 60,
"end_line": 1292,
"start_col": 0,
"start_line": 1290
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
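// Illustrative example (not part of the original source): with `tree = 10UL`
// and `index = 12UL`, `offsets_connect 10UL 12UL` holds (the difference 2UL is
// within `offset_range_limit`), `split_offset 10UL 12UL = 2ul`, and
// `join_offset 10UL 2ul = 12UL`.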
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
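// Illustrative example (not part of the original source): `offset_of 6ul = 6ul`
// and `offset_of 7ul = 6ul`, i.e. an index is rounded down to the start of its
// sibling pair.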
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
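// Illustrative example (not part of the original source): at level `lv` with
// `i = 2ul` and `j = 5ul`, the predicate requires `V.size_of (V.get h hs lv)`
// to equal `5ul - offset_of 2ul = 3ul`, covering the accessible elements
// `k = 2, 3, 4`, and recursively constrains level `lv + 1ul` with `i = 1ul`
// and `j = 2ul`.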
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION          AFTER INSERTION
// lv
// 0   h0  h1  h2        ====>   h0  h1  h2  h3
// 1   h01                       h01 h23
// 2                             h03
//
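// Illustrative trace of the diagram above (not part of the original source):
// inserting `h3` runs `insert_` at lv 0 with j = 3ul; `hash_vv_insert_copy`
// appends `h3` to `hs[0]`. Since j is odd, the accumulator becomes the hash of
// `h2` and `h3` (i.e. `h23`) and the recursion continues at lv 1 with j = 1ul,
// appending `h23` to `hs[1]`. There j is again odd, so the accumulator becomes
// the hash of `h01` and `h23` (i.e. `h03`), which the call at lv 2 (j = 0ul)
// appends to `hs[2]`.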
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content of `v`, since it uses `v` as an accumulator during
// insertion.
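//
// A minimal usage sketch (illustrative only; `mt` and `leaf` are hypothetical
// values assumed to satisfy the precondition below):
//
//   mt_insert hsz mt leaf;
//   // after the call, the buffer `leaf` no longer holds the original leaf
//   // hash: it served as the running accumulator while the new compressed
//   // hashes were pushed up the levels of `MT?.hs`.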
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
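//
// For example (a sketch; `r`, `init` and `hash_fun` are hypothetical values
// satisfying the precondition below), a one-element tree is obtained with
//
//   let mt = mt_create_custom hsz hash_spec r init hash_fun in ...
//
// and further leaves are then added with `mt_insert`.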
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
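// For intuition: a `path` is a sized vector of hash pointers, each of which
// lives somewhere inside the source tree's `MT?.hs` regions. For a four-leaf
// tree with leaves h0..h3 (using the `hij` naming of the `insert_` comment),
// a path for leaf 1 would hold pointers to sibling hashes such as h0 and h23,
// which suffice to recompute the root (an illustrative sketch; the exact
// contents are determined by the path-construction functions of this module).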
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
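// For instance, if hs = [p0; p1; p2] and i = 0, j = 3, the recursion unfolds to
//   S.snoc (S.snoc (S.snoc S.empty (repr p0)) (repr p1)) (repr p2)
// where `repr` abbreviates `Rgl?.r_repr (hreg hsz) h`; i.e., the lifted path
// lists the representations of hs[i..j-1] in order (p0, p1, p2 are
// hypothetical hash pointers).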
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\ | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.path_p -> FStar.HyperStack.ST.ST Prims.unit | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.Path",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Vector.clear",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hashes",
"Prims.unit",
"LowStar.BufferOps.op_Bang_Star"
] | [] | false | true | false | false | false | let clear_path mtr p =
| let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv)) | false |
MerkleTree.Low.fst | MerkleTree.Low.free_path | val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1)) | val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1)) | let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 10,
"end_line": 1306,
"start_col": 0,
"start_line": 1303
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
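// A small worked example (illustrative values): with tree = 10UL and
// index = 12UL, `offsets_connect tree index` holds and
// `split_offset tree index` = 2ul; conversely, `join_offset tree 2ul` = 12UL
// recovers the absolute offset, and `add64_fits tree 2ul` holds.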
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
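// For intuition, a well-formed three-leaf tree (matching the `insert_`
// diagram further below) would have i = 0ul, j = 3ul, hs[0] = [h0; h1; h2],
// hs[1] = [h01] and all higher levels empty; `rhs` and `mroot` carry
// meaningful values only once `rhs_ok` has been set by a root or path
// computation (an illustrative sketch of a typical state).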
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
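// e.g., offset_of 4ul = 4ul and offset_of 5ul = 4ul: an odd index is rounded
// down to the even index of its left sibling, so `j - offset_of i` below is
// the number of hashes currently stored at a level.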
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
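// A small worked instance (illustrative indices): for i = 2ul, j = 5ul at
// level `lv`, the clause requires V.size_of hs[lv] == 5 - 2 = 3, and the
// recursive call requires the same shape at level `lv + 1` for indices
// 1ul and 2ul (where offset_of 1ul = 0ul, so hs[lv+1] must have size 2).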
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content of `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
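// Editorial usage sketch (hypothetical names, not from this file): allocating a
// tree whose single leaf is `init`, in an eternal region `r` disjoint from the
// region of `init`:
//   let mt = mt_create_custom hsz hash_spec r init hash_fun in ...
// The resulting tree satisfies `mt_safe` and lifts to `MTH.mt_create`.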
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually
// define the invariants and representation here.
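// Editorial sketch (hypothetical names): a freshly allocated path is empty, e.g.
//   let p = init_path hsz mtr r in   (* S.equal (lift_path #hsz h mtr p) S.empty *)
// and every hash later pushed onto `Path?.hashes p` must live inside the tree
// region `mtr` (see `path_safe` below).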
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
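// For instance, with i = 0 and j = 2 this unfolds to
//   S.snoc (S.snoc S.empty (Rgl?.r_repr (hreg hsz) h (S.index hs 0)))
//          (Rgl?.r_repr (hreg hsz) h (S.index hs 1)),
// i.e. the lifted path lists the hash representations in index order.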
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | p: MerkleTree.Low.path_p -> FStar.HyperStack.ST.ST Prims.unit | FStar.HyperStack.ST.ST | [] | [] | [
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.free",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Vector.free",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"LowStar.BufferOps.op_Bang_Star"
] | [] | false | true | false | false | false | let free_path p =
| let pv = !*p in
V.free (Path?.hashes pv);
B.free p | false |
MerkleTree.Low.fst | MerkleTree.Low.lift_path_ | val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j) | val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j) | let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1)))) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 63,
"end_line": 1094,
"start_col": 0,
"start_line": 1091
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check that the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
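// For example, with three leaves inserted (cf. the diagram accompanying
// `insert_` below), `hs[0]` holds h0 h1 h2 and `hs[1]` holds h01; `rhs` and
// `mroot` only hold up-to-date values when `rhs_ok` is true.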
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
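// For example, at level 0 with i = 2 and j = 5, `hs[0]` must have size
// j - offset_of i = 5 - 2 = 3 (the hashes at positions 2, 3 and 4), and the
// predicate recurses at level 1 with i / 2 = 1 and j / 2 = 2.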
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content of `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually
// define the invariants and representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j:
FStar.Integers.nat
{ i <= j /\ j <= FStar.Seq.Base.length hs /\
LowStar.Vector.forall_seq hs
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp) }
-> Prims.GTot (hp: MerkleTree.New.High.path{FStar.Seq.Base.length hp = j - i}) | Prims.GTot | [
"sometrivial",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.l_and",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowStar.Vector.forall_seq",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.Datastructures.hreg",
"Prims.op_Equality",
"FStar.Seq.Base.empty",
"MerkleTree.New.High.hash",
"FStar.UInt32.v",
"Prims.bool",
"FStar.Seq.Properties.snoc",
"MerkleTree.Low.lift_path_",
"FStar.Integers.op_Subtraction",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"FStar.Seq.Base.index",
"MerkleTree.New.High.path",
"Prims.int"
] | [
"recursion"
] | false | false | false | false | false | let rec lift_path_ #hsz h hs i j =
| if i = j
then S.empty
else (S.snoc (lift_path_ h hs i (j - 1)) (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1)))) | false |
MerkleTree.Low.fst | MerkleTree.Low.path_safe_init_preserved | val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul)) | val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul)) | let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p))) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 66,
"end_line": 1216,
"start_col": 0,
"start_line": 1214
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything wider (e.g. 64-bit indices).
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
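// Editorial illustration (hypothetical values, not in the original source):
// for a tree offset of 1000UL and a global index of 1005UL, `offsets_connect`
// holds (the difference fits in 32 bits), `split_offset 1000UL 1005UL` yields
// the local index 5ul, and `join_offset 1000UL 5ul` recovers 1005UL.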
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
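// Editorial illustration (not in the original source): for a tree holding the
// four leaves h0..h3 (i = 0, j = 4), `hs` conceptually contains
//   hs[0] = [h0; h1; h2; h3]   (leaf hashes)
//   hs[1] = [h01; h23]         (h01 = hash h0 h1, h23 = hash h2 h3)
//   hs[2] = [h03]              (h03 = hash h01 h23)
// with all higher levels empty, and, once `rhs_ok` holds, `mroot` stores h03.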
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
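// Editorial note (illustration only): `offset_of` rounds an index down to the
// nearest even number, e.g. offset_of 4ul = 4ul and offset_of 5ul = 4ul, so a
// level that is valid for indices i..j-1 stores j - offset_of i hashes (see
// `mt_safe_elts` below).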
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
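// Editorial illustration (not in the original source): with i = 0 and j = 3
// (the "BEFORE" picture used for `insert_` further below), `mt_safe_elts`
// requires hs[0] to have size 3, hs[1] size 1 (j/2 = 1), hs[2] size 0, and
// every higher level size 0 as well.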
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree throughout its lifetime.
// It includes liveness, regionality, disjointness (between its data structures),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is safe to take all regions
// reachable from the tree pointer as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// its content and pushing it onto `hs[lv]`. For the detailed insertion
// procedure, see `insert_` and `mt_insert`.
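// Editorial note (illustration only): e.g. with lv = 0, i = 0, j = 3, pushing a
// copy of `v` turns hs[0] = [h0; h1; h2] into [h0; h1; h2; v] while every other
// level of `hs` is left untouched; folding `v` upwards is the job of `insert_`.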
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
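// Editorial walk-through of the diagram above (illustration only), inserting h3
// with j = 3: level 0 pushes h3 and, since j is odd, folds the accumulator into
// h23 = hash h2 h3; level 1 (j = 1, odd) pushes h23 and folds it into
// h03 = hash h01 h23; level 2 (j = 0, even) pushes h03 and stops.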
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the content of `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually
// define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
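// Editorial note (not in the original source): by `path_safe`, the region of
// `phashes` extends `B.frameOf p`, so `path_loc p` covers the path pointer and
// its hash vector; the pointed-to hashes themselves live under the tree region
// `mtr`, which is disjoint from `B.frameOf p` and hence not covered.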
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
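// Editorial note (illustration only): `lift_path_ h hs i j` collects the
// high-level representations of hs[i..j-1] in order; e.g. for i = 0, j = 3 it
// is the sequence [repr hs[0]; repr hs[1]; repr hs[2]], writing repr for
// Rgl?.r_repr (hreg hsz) h.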
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\ | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h0 mtr p /\
LowStar.Vector.size_of (MerkleTree.Low.phashes h0 p) = 0ul /\
LowStar.Monotonic.Buffer.loc_disjoint dl (MerkleTree.Low.path_loc p) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
MerkleTree.Low.path_safe h1 mtr p /\
LowStar.Vector.size_of (MerkleTree.Low.phashes h1 p) = 0ul) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.phashes",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | true | false | true | false | false | let path_safe_init_preserved mtr p dl h0 h1 =
| assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p))) | false |
MerkleTree.Low.fst | MerkleTree.Low.lift_path_index | val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i))) | val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i))) | let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 53,
"end_line": 1133,
"start_col": 0,
"start_line": 1131
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything wider (e.g. 64-bit indices).
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
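// A concrete instance: with i = 1ul and j = 5ul, offset_of 1ul = 0ul, so level 0
// must hold 5 hashes; the recursive calls then require 2 hashes at level 1
// (i / 2ul = 0ul, j / 2ul = 2ul), 1 hash at level 2, and 0 hashes at every
// higher level.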
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// extending from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
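// Concretely, the transition above happens as follows: inserting `h3` (the
// initial accumulator `acc`) with j = 3 first pushes `h3` onto `hs[0]`; since
// j % 2 = 1, the accumulator becomes `hash h2 h3` (= h23) and the recursive
// call at level 1 (with j / 2 = 1) pushes it onto `hs[1]`; again 1 % 2 = 1, so
// the accumulator becomes `hash h01 h23` (= h03), which the next call pushes
// onto `hs[2]` and stops there, because the index at that level is even.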
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always stale (out of date) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` creates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
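// Note that `lift_path_ h hs i j` lifts `hs[i] .. hs[j-1]` in order: the k-th
// element of the result is the high-level representation of `hs[i + k]`, as
// the lemma `lift_path_index_` below makes precise.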
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) == | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
i: LowStar.Vector.uint32_t
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h mtr p /\ i < LowStar.Vector.size_of (MerkleTree.Low.phashes h p))
(ensures
(let hsz = Path?.hash_size (LowStar.Monotonic.Buffer.get h p 0) in
Rgl?.r_repr (MerkleTree.Low.Datastructures.hreg hsz)
h
(LowStar.Vector.get h (MerkleTree.Low.phashes h p) i) ==
FStar.Seq.Base.index (MerkleTree.Low.lift_path h mtr p) (FStar.UInt32.v i))) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.lift_path_index_",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"FStar.UInt32.v",
"Prims.unit"
] | [] | true | false | true | false | false | let lift_path_index h mtr p i =
| lift_path_index_ h (V.as_seq h (phashes h p)) 0 (S.length (V.as_seq h (phashes h p))) (U32.v i) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_flush_pre_nst | val mt_flush_pre_nst: mt:merkle_tree -> Tot bool | val mt_flush_pre_nst: mt:merkle_tree -> Tot bool | let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 45,
"end_line": 2497,
"start_col": 0,
"start_line": 2497
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
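// For intuition (an illustrative example, not used by the code): with a tree
// offset of 100UL, the element at internal index 42ul sits at external offset
// join_offset 100UL 42ul = 142UL, and split_offset 100UL 142UL = 42ul recovers
// the internal index; offsets_connect 100UL 142UL holds since 142UL >= 100UL
// and the difference fits in 32 bits.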
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
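// `offset_of` rounds an index down to the nearest even index, e.g.
// offset_of 6ul = 6ul and offset_of 7ul = 6ul. In `mt_safe_elts` below,
// level `lv` is required to physically hold `j - offset_of i` hashes.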
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
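// A concrete instance: with i = 1ul and j = 5ul, offset_of 1ul = 0ul, so level 0
// must hold 5 hashes; the recursive calls then require 2 hashes at level 1
// (i / 2ul = 0ul, j / 2ul = 2ul), 1 hash at level 2, and 0 hashes at every
// higher level.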
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// extending from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
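// Concretely (illustrative trace of the code below): inserting `h3` into the
// three-leaf tree above (j = 3), level 0 pushes h3 and, since j is odd, the
// accumulator becomes h23 = hash h2 h3; level 1 (j = 1) pushes h23 and the
// accumulator becomes h03 = hash h01 h23; level 2 (j = 0) just pushes h03,
// yielding the AFTER column.
//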
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// mutates the content of `v`, since it uses `v` as an accumulator during
// insertion.
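//
// A hypothetical call site (sketch only; `mt` is an existing tree and `v` a
// hash allocated in a region disjoint from it, with `mt_insert_pre_nst`
// holding as the precondition below requires):
//
//   mt_insert hsz mt v;
//   (* `v` may have been overwritten with an intermediate hash and should not
//      be reused as the original leaf value *)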
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always invalidated (stale) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
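//
// A hypothetical usage sketch (the names `r`, `init`, `spec` and `sha256_fun`
// are assumptions, not definitions from this file): for a 32-byte hash one
// could write
//
//   let mt = mt_create_custom 32ul (Ghost.hide spec) r init sha256_fun in ...
//
// provided `r` is an eternal region id disjoint from the region of `init`,
// and `spec`/`sha256_fun` are a matching specification/implementation pair.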
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
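//
// Illustrative run (sketch, with i = 0 and the three-leaf tree from the
// `insert_` example: hs[0] = [h0; h1; h2], hs[1] = [h01], j = 3, actd = false):
// level 0 is odd and actd is false, so `acc` is overwritten with h2; the
// recursion continues at level 1 with j = 1 and actd = true, where rhs[1] is
// set to `acc` (= h2) and `acc` becomes hash h01 h2, the Merkle root that
// `mt_get_root` then copies into `mt.mroot`.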
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it has already been calculated
// with up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build the rightmost hashes and to calculate the Merkle
// root along the way.
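//
// A hypothetical caller (sketch only; `rt_region` is an assumed fresh region,
// disjoint from the tree as the precondition requires):
//
//   let rt = rg_alloc (hreg hsz) rt_region in
//   mt_get_root mt rt;
//   (* `rt` now holds the root; per the postcondition, `mt.rhs_ok` is true *)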
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in a postcondition of `mt_get_path` and in a
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
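// Worked example (illustrative only): for a tree with j = 5 leaves, target
// index k = 2 and no rightmost hashes accumulated yet (actd = false),
// unfolding `mt_path_length 0ul 2ul 5ul false` gives
//   step(2,5,false) + step(1,2,true) + step(0,1,true) = 1 + 1 + 1 = 3,
// i.e. the path consists of the sibling leaf h3, the level-1 node h01, and
// the rightmost hash covering leaf h4.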
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" to hashes inside
// the Merkle tree into the output path.
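//
// A hypothetical call sequence (sketch only; `path_region` and `root` are
// assumed to live in regions disjoint from the tree, as `mt_get_path_pre_nst`
// and the precondition below require):
//
//   let p = init_path hsz (B.frameOf (CB.cast mt)) path_region in
//   let j = mt_get_path mt idx p root in
//   (* per the postcondition, `p` now holds 1ul + mt_path_length 0ul idx' j
//      false hashes, where idx' is `idx` with the tree's offset split off *)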
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
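  // If `i` and `pi` are aligned to the same even offset, this level (and hence
  // every higher level) has nothing to flush; the spec lemma alone discharges
  // the correctness postcondition.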
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always retain at least one base hash
// element: if the tree holds `MT?.j` elements, then because of the
// precondition `MT?.i <= idx < MT?.j` the `idx`-th element is still present
// after flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
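  // The flush only modifies `hs`; the lemmas below re-establish the invariants
  // and logical views of `rhs` and `mroot`, which live in regions disjoint from
  // the locations touched above.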
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mt: MerkleTree.Low.merkle_tree -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"FStar.Integers.op_Greater",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.__proj__MT__item__i",
"Prims.bool"
] | [] | false | false | false | true | false | let mt_flush_pre_nst mt =
| MT?.j mt > MT?.i mt | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_safe_elts_spec | val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv)) | val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv)) | let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 60,
"end_line": 273,
"start_col": 0,
"start_line": 271
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of the lack of 64-bit LowStar.Buffer support, the types
// below currently cannot be changed to wider ones.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
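// For example, if the tree's offset is 10UL then the external offset 12UL
// denotes the internal index 2ul.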
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
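// `offset`: a 64-bit base offset; externally visible indices are offsets and
// are mapped to internal 32-bit indices with `split_offset`.
// `i`, `j`: the internal index range of leaves currently represented; `i` grows
// when old leaves are flushed and `j` grows when new leaves are inserted.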
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
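// `offset_of i` rounds `i` down to the nearest even index: at each level the
// vector physically stores the hashes from `offset_of i` up to `j - 1`.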
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
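// For example, `mt_safe_elts h 0ul hs 3ul 7ul` requires level 0 to hold the
// 5 hashes at indices 2..6 (offset_of 3ul = 2ul), level 1 the 3 hashes at
// indices 0..2, level 2 a single hash, and every higher level to be empty.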
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree throughout its lifetime.
// It includes liveness, regionality, disjointness (between its constituent
// data structures), and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// reachable from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{j >= i}
-> FStar.Pervasives.Lemma
(requires LowStar.RVector.rv_inv h hs /\ MerkleTree.Low.mt_safe_elts h lv hs i j)
(ensures
MerkleTree.New.High.hs_wf_elts (FStar.UInt32.v lv)
(LowStar.RVector.as_seq h hs)
(FStar.UInt32.v i)
(FStar.UInt32.v j))
(decreases 32 - FStar.UInt32.v lv) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"Prims.bool",
"MerkleTree.Low.mt_safe_elts_spec",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Slash",
"Prims.unit"
] | [
"recursion"
] | false | false | true | false | false | let rec mt_safe_elts_spec #_ h lv hs i j =
| if lv = merkle_tree_size_lg then () else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_create_custom | val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init))) | val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init))) | let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 4,
"end_line": 1043,
"start_col": 0,
"start_line": 1038
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of the lack of 64-bit LowStar.Buffer support, the types
// below currently cannot be changed to wider ones.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree throughout its lifetime.
// It includes liveness, regionality, disjointness (between its constituent
// data structures), and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// reachable from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` changes `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION           AFTER INSERTION
// lv
// 0   h0 h1 h2             ====> h0 h1 h2 h3
// 1   h01                        h01 h23
// 2                              h03
//
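// A concrete trace of the picture above, following the code of `insert_` below
// (inserting `h3`, with the accumulator `acc` initially holding `h3`):
// at `lv = 0`, `j = 3` is odd, so `h3` is copied into `hs[0]`, `acc` becomes
// `hash h2 h3 = h23`, and we recurse with `j / 2 = 1`; at `lv = 1`, `j = 1` is
// odd, so `h23` is copied into `hs[1]`, `acc` becomes `hash h01 h23 = h03`, and
// we recurse with `j / 2 = 0`; at `lv = 2`, `j = 0` is even, so `h03` is simply
// copied into `hs[2]` and the recursion stops.
//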
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
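// In particular, the final contents of `v` are not specified to be the original
// leaf hash: whenever a level holds an odd number of elements, the compression
// step overwrites `v` with the new accumulator value, so callers that still
// need the original leaf value should keep their own copy of it.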
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
           false // `rhs` is always stale (out of date) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
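// Concretely, `mt_create_custom` is implemented by allocating an empty tree
// with `create_empty_mt` and then inserting `init` with `mt_insert`, so the
// freshly created tree already holds one leaf.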
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
hsz: MerkleTree.Low.Datastructures.hash_size_t ->
hash_spec: FStar.Ghost.erased MerkleTree.Spec.hash_fun_t ->
r: FStar.HyperStack.ST.erid ->
init: MerkleTree.Low.Datastructures.hash ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t
-> FStar.HyperStack.ST.ST MerkleTree.Low.mt_p | FStar.HyperStack.ST.ST | [] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Ghost.erased",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"FStar.HyperStack.ST.erid",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"MerkleTree.Low.mt_p",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Prims.unit",
"MerkleTree.Low.mt_insert",
"FStar.Ghost.hide",
"MerkleTree.Low.create_empty_mt"
] | [] | false | true | false | false | false | let mt_create_custom hsz hash_spec r init hash_fun =
| let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt | false |
MerkleTree.Low.fst | MerkleTree.Low.lift_path | val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))}) | val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))}) | let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 43,
"end_line": 1103,
"start_col": 0,
"start_line": 1101
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
//   `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
//   `rhs_ok`: indicates whether the rightmost hashes are up to date.
//   `rhs`: a store for the "rightmost" hashes, manipulated only when required
//          to calculate some Merkle paths that need the rightmost hashes
//          as a part of them.
//   `mroot`: during the construction of `rhs` we can also calculate the Merkle
//            root of the tree. If `rhs_ok` is true then it holds the up-to-date
//            root value.
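//
// For instance, in the three-leaf configuration pictured for `insert_` further
// below, `hs[0]` holds the leaf hashes `h0 h1 h2` and `hs[1]` holds the single
// compressed hash `h01`; paths that reach the unpaired rightmost nodes are then
// completed with the rightmost hashes cached in `rhs`, as described above.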
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
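// For example, with `i = 0ul` and `j = 5ul`, the definition below requires
// `hs[0]` to have exactly 5 elements, `hs[1]` exactly 2 (the bounds are halved
// at each level), `hs[2]` exactly 1, and every level above that exactly 0.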
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree throughout its lifetime.
// It includes liveness, regionality, disjointness (between the data structures),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
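// Note that, per the `modifies` clause in its postcondition, this helper only
// touches the `lv`-th element of `hs` (and the `lv`-th slot of the outer
// vector); the levels above `lv` are handled by the caller `insert_`.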
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` changes `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION           AFTER INSERTION
// lv
// 0   h0 h1 h2             ====> h0 h1 h2 h3
// 1   h01                        h01 h23
// 2                              h03
//
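// A concrete trace of the picture above, following the code of `insert_` below
// (inserting `h3`, with the accumulator `acc` initially holding `h3`):
// at `lv = 0`, `j = 3` is odd, so `h3` is copied into `hs[0]`, `acc` becomes
// `hash h2 h3 = h23`, and we recurse with `j / 2 = 1`; at `lv = 1`, `j = 1` is
// odd, so `h23` is copied into `hs[1]`, `acc` becomes `hash h01 h23 = h03`, and
// we recurse with `j / 2 = 0`; at `lv = 2`, `j = 0` is even, so `h03` is simply
// copied into `hs[2]` and the recursion stops.
//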
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
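// In particular, the final contents of `v` are not specified to be the original
// leaf hash: whenever a level holds an odd number of elements, the compression
// step overwrites `v` with the new accumulator value, so callers that still
// need the original leaf value should keep their own copy of it.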
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
           false // `rhs` is always stale (out of date) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
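// Concretely, `mt_create_custom` below first allocates an empty tree with
// `create_empty_mt` and then inserts `init` with `mt_insert`, so the freshly
// created tree already holds one leaf.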
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually define
// the invariants and the representation here.
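// Concretely, `path_safe` below spells out liveness and freeability of the path
// pointer and of its hash vector, and requires every stored hash to satisfy the
// hash invariant and to live in a region included in the tree region `mtr`.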
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
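// For intuition, unfolding the recursion on a three-element sequence
// hs = [h0; h1; h2] (all satisfying the invariant in `h`) gives
//   lift_path_ h hs 0 3
//   == S.snoc (S.snoc (S.snoc S.empty (repr h0)) (repr h1)) (repr h2),
// i.e. the high-level path [repr h0; repr h1; repr h2], where `repr`
// abbreviates `Rgl?.r_repr (hreg hsz) h`.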
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p:
MerkleTree.Low.path_p
{ MerkleTree.Low.path_safe h mtr p /\
Path?.hash_size (LowStar.Monotonic.Buffer.get h p 0) = hsz }
-> Prims.GTot
(hp:
MerkleTree.New.High.path
{ FStar.Seq.Base.length hp =
FStar.UInt32.v (LowStar.Vector.size_of (MerkleTree.Low.phashes h p)) }) | Prims.GTot | [
"sometrivial"
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"Prims.l_and",
"MerkleTree.Low.path_safe",
"Prims.b2t",
"Prims.op_Equality",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.lift_path_",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"MerkleTree.New.High.path",
"FStar.UInt32.v",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"MerkleTree.New.High.hash",
"LowStar.Vector.size_of"
] | [] | false | false | false | false | false | let lift_path #hsz h mtr p =
| lift_path_ h (V.as_seq h (phashes h p)) 0 (S.length (V.as_seq h (phashes h p))) | false |
MerkleTree.Low.fst | MerkleTree.Low.init_path | val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty)) | val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty)) | let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 57,
"end_line": 1278,
"start_col": 0,
"start_line": 1276
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
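// A small worked example (illustrative): with a tree offset of 100UL and an
// absolute index of 150UL, `offsets_connect 100UL 150UL` holds (150 >= 100 and
// the difference 50 is within `offset_range_limit`), `split_offset 100UL 150UL`
// yields the local index 50ul, and `join_offset 100UL 50ul` recovers the
// absolute offset 150UL.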
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
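// For intuition: a tree that has seen three insertions and no flush has
// i = 0ul and j = 3ul, with `hs[0]` holding 3 leaf hashes, `hs[1]` holding 1
// internal hash, and all higher levels empty; `rhs` and `mroot` only hold
// up-to-date values when `rhs_ok` is true.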
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
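// E.g. offset_of 4ul = 4ul and offset_of 5ul = 4ul: an odd index is rounded
// down to its even left sibling, which marks the first hash still stored at
// that level.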
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
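// As a concrete instance: with i = 1ul and j = 3ul at level 0ul, we have
// offset_of 1ul = 0ul, so `hs[0]` must hold exactly 3 hashes; the recursive
// call then requires `hs[1]` to hold 1 hash (for i / 2ul = 0ul and
// j / 2ul = 1ul), and every higher level to be empty.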
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, pairwise disjointness of its data structures,
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv`, by copying it
// and pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//      BEFORE INSERTION        AFTER INSERTION
//     lv
//      0   h0 h1 h2     ====>  h0 h1 h2 h3
//      1   h01                 h01 h23
//      2                       h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in a `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\ | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
hsz: MerkleTree.Low.Datastructures.hash_size_t ->
mtr: FStar.Monotonic.HyperHeap.rid ->
r: FStar.HyperStack.ST.erid
-> FStar.HyperStack.ST.ST MerkleTree.Low.path_p | FStar.HyperStack.ST.ST | [] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"FStar.HyperStack.ST.erid",
"LowStar.Buffer.malloc",
"MerkleTree.Low.path",
"FStar.UInt32.__uint_to_t",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.v",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"MerkleTree.Low.Path",
"LowStar.Vector.vector",
"MerkleTree.Low.Datastructures.hash",
"LowStar.Regional.rg_alloc",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.path_p",
"FStar.HyperStack.ST.new_region"
] | [] | false | true | false | false | false | let init_path hsz mtr r =
| let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_retract_to_pre_nst | val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool | val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool | let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 35,
"end_line": 2749,
"start_col": 0,
"start_line": 2746
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because 64-bit LowStar.Buffer support is lacking, we currently
// cannot change the types below to anything wider.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
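// Illustrative note (not part of the original source): these helpers convert
// between the tree-global 64-bit offset and a local 32-bit index. For example,
// with a tree offset of 100UL, the global offset 103UL is connected
// (103UL - 100UL = 3UL <= offset_range_limit), `split_offset 100UL 103UL`
// yields the local index 3ul, and `join_offset 100UL 3ul` recovers 103UL.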
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: indicates whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
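// Illustrative note (not part of the original source): `offset_of` rounds an
// index down to the nearest even number, e.g. `offset_of 6ul = 6ul` and
// `offset_of 7ul = 6ul`. `hs[lv]` only keeps the hashes from `offset_of i`
// up to `j - 1`, which is why `mt_safe_elts` below demands exactly
// `j - offset_of i` elements at each level.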
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
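// Illustrative note (not part of the original source): for i = 0ul and j = 5ul,
// `mt_safe_elts h 0ul hs 0ul 5ul` unfolds to |hs[0]| = 5, then (halving the
// indices at each level) |hs[1]| = 2, |hs[2]| = 1, and |hs[lv]| = 0 for every
// lv >= 3.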
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (between its data structures),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
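// Illustrative note (not part of the original source): the element is inserted
// by *copying* `v` into storage owned by the tree, so the tree never aliases
// the caller's buffer; this is what allows `insert_` to keep reusing `v` as
// its running accumulator after the copy.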
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION            AFTER INSERTION
// lv
// 0   h0 h1 h2              ====> h0 h1 h2 h3
// 1   h01                         h01 h23
// 2                               h03
//
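// Illustrative note (not part of the original source): the recursion is driven
// by the parity of `j`. If `j` is odd, the freshly appended hash at index `j`
// has a left sibling at index `j - 1`, so the two are compressed with
// `hash_fun` into the accumulator and `insert_` recurses one level up with the
// indices halved; if `j` is even, the new hash has no sibling yet and the
// recursion stops at this level.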
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always invalidated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
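// Illustrative note (not part of the original source): `mt_create_custom` is
// simply `create_empty_mt` followed by one `mt_insert` of `init`, so the
// resulting tree starts with i = 0ul and j = 1ul.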
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually
// define the invariants and representation here.
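// Illustrative note (not part of the original source): a `path` is just a
// vector of pointers to hashes owned by the tree's region `mtr`; `path_safe`
// below requires every stored pointer to live in a region included in `mtr`,
// while the path itself lives in a region disjoint from `mtr`.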
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
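// Illustrative note (not part of the original source): the "rightmost" hashes
// matter when the number of leaves is not a power of two. For example, with
// five leaves h0..h4, the dangling leaf h4 has no right sibling; it is carried
// in the accumulator, stored into `rhs`, and the root ends up as
// hash(h03, h4), where h03 compresses the first four leaves.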
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
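// Illustrative note (not part of the original source): `rhs_ok` acts as a
// cache-validity bit. `mt_insert` resets it to false, so the first
// `mt_get_root` after an insertion recomputes `rhs` and `mroot` via
// `construct_rhs` and sets `rhs_ok` back to true; subsequent calls simply copy
// `mroot` into `rt`.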
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating whether rightmost hashes are involved), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`. A small worked example follows the two definitions below.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
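// As an illustration of the two definitions above: for a tree with `j = 4`
// leaves and target index `k = 0` (with `actd = false`), the levels contribute
// 1, 1 and 0 sibling hashes respectively, so
// `mt_path_length 0ul 0ul 4ul false = 2ul`; together with the leaf hash itself,
// `mt_get_path` below returns a path of 1 + 2 = 3 hashes.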
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" to hashes inside the
// Merkle tree into the output path; the hash contents themselves are not copied.
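// For example (following the postcondition below), for a tree with `MT?.j = 4`
// and `idx = 0`, the resulting path holds the leaf hash at index 0 followed by
// its sibling hashes: `1ul + mt_path_length 0ul 0ul 4ul false = 3ul` entries in
// total.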
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always retain at least one base hash
// element. If there are `MT?.j` elements in the tree, then because of the
// precondition `MT?.i <= idx < MT?.j` the `idx`-th element is still present
// after flushing.
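//
// A concrete illustration, derived from `mt_flush_to_` above: for a tree with
// `MT?.i = 0` and `MT?.j = 10`, flushing to `idx = 7` sets the new `MT?.i` to 7
// and drops the level-0 hashes at indices 0..5 (since `offset_of 7 = 6`), so
// index 6 is kept and a Merkle path for index 7 can still be generated.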
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
private inline_for_extraction | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtv: MerkleTree.Low.merkle_tree -> r: MerkleTree.Low.offset_t -> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.offset_t",
"Prims.op_AmpAmp",
"MerkleTree.Low.offsets_connect",
"MerkleTree.Low.__proj__MT__item__offset",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Less",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"Prims.bool"
] | [] | false | false | false | true | false | let mt_retract_to_pre_nst mtv r =
| offsets_connect (MT?.offset mtv) r &&
([@@ inline_let ]let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv) | false |
MerkleTree.Low.fst | MerkleTree.Low.lift_path_eq | val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j))) | val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j))) | let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i)) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 82,
"end_line": 1161,
"start_col": 0,
"start_line": 1145
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the index types below to anything else (e.g. 64-bit indices).
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
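// A small worked example of the offset helpers above (values chosen only for
// illustration): `offsets_connect 100UL 105UL` holds, since 105 >= 100 and the
// difference 5 fits in 32 bits; `split_offset 100UL 105UL = 5ul`, and
// conversely `join_offset 100UL 5ul = 105UL`.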
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
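// For instance, immediately after `create_empty_mt` below, the tree has
// `offset = 0UL`, `i = j = 0ul`, all 32 levels of `hs` empty, and
// `rhs_ok = false`.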
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
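// For instance, `offset_of 4ul = 4ul` and `offset_of 5ul = 4ul`: an index is
// rounded down to the nearest even number.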
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
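// For example, with `i = 2ul` and `j = 5ul`, `mt_safe_elts h 0ul hs i j` requires
// `V.size_of (V.get h hs 0ul) = 3ul` (that is, 5 - offset_of 2), then
// `V.size_of (V.get h hs 1ul) = 2ul` (for i = 1, j = 2),
// `V.size_of (V.get h hs 2ul) = 1ul` (for i = 0, j = 1), and empty vectors at
// all higher levels.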
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// reachable from the tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
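// In terms of the postcondition below: after the call, `hs[lv]` is the old
// `hs[lv]` with a copy of `v` appended (a `snoc`), and since nothing outside
// level `lv` is modified, the other levels keep their contents.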
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
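// Walking through the picture: inserting h3 runs `insert_` with lv = 0, i = 0,
// j = 3 and acc = h3. Level 0 receives a copy of h3; since j = 3 is odd, the
// accumulator becomes hash(h2, h3) = h23 and we recurse with j / 2 = 1. Level 1
// then receives h23; since 1 is odd, the accumulator becomes hash(h01, h23) = h03
// and we recurse with j = 0. Level 2 receives h03; 0 is even, so the recursion stops.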
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// insertion.
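// The postcondition below ties this to the high-level spec: after the call,
// `mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)`,
// and the `modifies` clause covers the regions of `v` as well as the tree, so the
// caller must not expect `v` to still hold the inserted hash afterwards.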
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
        false // `rhs` is always stale right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` creates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
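// `mt_create_custom` is simply `create_empty_mt` followed by `mt_insert` of `init`,
// so the resulting tree holds exactly one leaf (i = 0, j = 1) and lifts to
// `MTH.mt_create` applied to the initial hash.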
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
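// Concretely, `path_safe` below plays the role of the regional invariant for a
// path: liveness of the vector plus `r_inv` for every hash it points to, all of
// which must live inside the tree region `mtr`. `path_loc` is the corresponding
// footprint.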
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
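// `lift_path_` builds the high-level path back to front (by `snoc`), so the hash
// pointer at index k of `hs` ends up at index k - i of the lifted path; this is
// exactly what `lift_path_index_` below states.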
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
hs1: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
hs2: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat
-> FStar.Pervasives.Lemma
(requires
i <= j /\ j <= FStar.Seq.Base.length hs1 /\ j <= FStar.Seq.Base.length hs2 /\
FStar.Seq.Base.equal (FStar.Seq.Base.slice hs1 i j) (FStar.Seq.Base.slice hs2 i j) /\
LowStar.Vector.forall_seq hs1
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp) /\
LowStar.Vector.forall_seq hs2
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp))
(ensures
FStar.Seq.Base.equal (MerkleTree.Low.lift_path_ h hs1 i j)
(MerkleTree.Low.lift_path_ h hs2 i j)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims._assert",
"Prims.l_Forall",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Integers.op_Less",
"Prims.eq2",
"FStar.Seq.Base.index",
"FStar.Seq.Base.slice",
"FStar.Integers.op_Subtraction",
"Prims.unit",
"Spec.Hash.Definitions.bytes",
"Prims.l_or",
"Prims.op_Equality",
"Prims.int",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"FStar.Seq.Base.length",
"Lib.IntTypes.uint8",
"FStar.UInt32.v",
"MerkleTree.New.High.hash",
"MerkleTree.Low.lift_path_",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Integers.op_Plus"
] | [] | false | false | true | false | false | let lift_path_eq #hsz h hs1 hs2 i j =
| assert (forall (k: nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) == Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k: nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) == Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k: nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k == Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k: nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k == Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k: nat{k < j - i}). S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k: nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i)) | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.u_two | val u_two : FStar.Stubs.Reflection.Types.universe | let u_two = RT.(u_succ (u_succ u_zero)) | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 39,
"end_line": 25,
"start_col": 0,
"start_line": 25
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.Stubs.Reflection.Types.universe | Prims.Tot | [
"total"
] | [] | [
"FStar.Reflection.Typing.u_succ",
"FStar.Reflection.Typing.u_zero"
] | [] | false | false | false | true | false | let u_two =
| let open RT in u_succ (u_succ u_zero) | false |
|
MerkleTree.Low.fst | MerkleTree.Low.mt_get_path_pre_nst | val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool | val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool | let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 36,
"end_line": 1990,
"start_col": 0,
"start_line": 1985
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot switch the index types below to wider (64-bit) ones.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
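// The 64-bit `offset` of a tree is the base for the externally visible 64-bit
// indices; internally the tree works with 32-bit indices relative to that base.
// `split_offset` recovers the relative 32-bit index from an absolute position
// (guarded by `offsets_connect`), and `join_offset` maps a relative index back
// to an absolute one (guarded by `add64_fits`).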
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
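// For instance, with offset = 0, i = 0 and j = 3, `hs[0]` holds the three leaf
// hashes h0 h1 h2, `hs[1]` holds h01, the higher levels are empty, and `rhs` and
// `mroot` are only meaningful when `rhs_ok` is set.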
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
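// `offset_of i` rounds `i` down to the nearest even index; `j - offset_of i` is
// the number of hashes a level keeps for the element range `i .. j-1` (see
// `mt_safe_elts` below).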
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
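// For example, with i = 0 and j = 3 this forces |hs[0]| = 3, |hs[1]| = 1, and
// |hs[lv]| = 0 for every lv >= 2.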
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is okay to take all regions
// reachable from a tree pointer's frame as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it and
// pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
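// For illustration (following the code below, with `hij` named as in the diagram):
// inserting `h3` calls `insert_` at level 0 with `j = 3`; since `j` is odd, it
// appends `h3` to `hs[0]`, folds `acc <- hash h2 h3 = h23`, and recurses at level 1
// with `j = 1`; level 1 appends `h23`, folds `acc <- hash h01 h23 = h03`, and
// recurses with `j = 0`, which simply appends `h03` at level 2 and stops.
//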
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the contents of `v`, since it uses `v` as an accumulator during
// insertion.
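// For illustration (hypothetical caller): given a live tree `mt` and a hash buffer
// `v` holding the new leaf (allocated in a region disjoint from `mt`), calling
// `mt_insert hsz mt v` appends the leaf; afterwards `v` may contain an intermediate
// hash rather than the original leaf value, so callers should not reuse its contents.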
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
              false // `rhs` is always invalidated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
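// For illustration (hypothetical names): `let mt = mt_create_custom hsz hash_spec r init hash_fun`
// builds a singleton tree whose only leaf is the value of `init`, by creating an
// empty tree and then calling `mt_insert hsz mt init`; as with `mt_insert`, `init`
// serves as the accumulator and may be overwritten by the call.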
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
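// Illustrative path lifecycle (hypothetical client code): `let p = init_path hsz mtr r`
// allocates an empty path; a path query such as `mt_get_path` (defined later in this
// module) then fills `p` with sibling hashes; `clear_path mtr p` resets it for reuse,
// and `free_path p` releases both the hash vector and the pointer itself.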
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
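// For illustration, with the three-leaf tree from the `insert_` example
// (`hs[0] = [h0; h1; h2]`, `hs[1] = [h01]`, `i = 0`, `j = 3`, `actd = false`):
// level 0 has odd `j`, so `acc <- h2` (no `rhs` write since `actd` is false) and we
// recurse with `j = 1`, `actd = true`; level 1 has odd `j`, so `rhs[1] <- h2` and
// `acc <- hash h01 h2`, which is the Merkle root of the three leaves; the recursion
// then stops at `j = 0`.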
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
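// For illustration, continuing the three-leaf example: the first call on a freshly
// inserted tree finds `rhs_ok = false`, runs `construct_rhs`, leaves the root
// (`hash h01 h2` in that example) in `rt`, copies it into `mroot`, and stores the
// tree back with `rhs_ok = true`; a second call with no intervening insertion just
// copies `mroot` into `rt`.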
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// Given a target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
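// Worked example (illustrative values): `mt_path_length 0ul 5ul 7ul false` evaluates
// to `3`: the step for (k=5, j=7) contributes 1 (odd k); the recursive call has
// k=2, j=3, actd=true and contributes 1 (k is even and k+1 = j, but actd is set);
// the next call has k=1, j=1 and contributes 1 (odd k); the recursion stops at j=0.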
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
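// For illustration (with i = 0, so ofs = 0, and j = 7): for an odd target k = 5 the
// step pushes the left sibling `hs[lv][4]`; for an even k = 4 it pushes the right
// sibling `hs[lv][5]`; for an even k = 6 (where k + 1 = j) it pushes `rhs[lv]` only
// when `actd` is set, and pushes nothing otherwise.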
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mtv: MerkleTree.Low.merkle_tree ->
idx: MerkleTree.Low.offset_t ->
p: MerkleTree.Low.path ->
root: MerkleTree.Low.Datastructures.hash
-> Prims.bool | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.path",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.op_AmpAmp",
"MerkleTree.Low.offsets_connect",
"MerkleTree.Low.__proj__MT__item__offset",
"Prims.op_Equality",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.__proj__Path__item__hash_size",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Less",
"MerkleTree.Low.__proj__MT__item__j",
"FStar.UInt32.t",
"LowStar.Vector.size_of",
"MerkleTree.Low.__proj__Path__item__hashes",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"Prims.bool"
] | [] | false | false | false | false | false | let mt_get_path_pre_nst mtv idx p root =
| offsets_connect (MT?.offset mtv) idx && Path?.hash_size p = MT?.hash_size mtv &&
([@@ inline_let ]let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv && V.size_of (Path?.hashes p) = 0ul) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_get_root_pre | val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True)) | val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True)) | let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 27,
"end_line": 1532,
"start_col": 0,
"start_line": 1527
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
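// Illustrative values: with `tree = 100UL` and `index = 164UL`, `offsets_connect`
// holds (the difference 64 fits the 32-bit range), `split_offset 100UL 164UL = 64ul`,
// and `join_offset 100UL 64ul = 164UL` recovers the absolute offset.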
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
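// For example, offset_of 6ul = 6ul and offset_of 7ul = 6ul: an odd index is
// rounded down to the preceding even index.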
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
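// Informally: at level `lv` the vector holds exactly the (j - offset_of i)
// hashes for indices offset_of i .. j - 1, and the same condition holds one
// level up with the halved indices i / 2ul and j / 2ul.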
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take all regions
// reachable from the tree pointer as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it
// and pushing the copy onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
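// In this example, inserting h3 appends h3 at level 0, compresses h2 and h3
// into h23 and appends it at level 1, then compresses h01 and h23 into h03
// and appends it at level 2.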
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
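// i.e. insertion is allowed only while the tree is not full and the new
// rightmost index j + 1 can still be combined with the 64-bit offset
// without overflow.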
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
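// `lift_path_` lifts the subsequence hs[i..j-1] to its high-level
// representation, appending the lifted hashes from left to right.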
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mt: MerkleTree.Low.const_mt_p -> rt: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_get_root_pre_nst",
"Prims.unit",
"Prims._assert",
"Prims.b2t",
"Prims.op_Equality",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | false | true | false | false | false | let mt_get_root_pre #hsz mt rt =
| let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt | false |
MerkleTree.Low.fst | MerkleTree.Low.path_preserved | val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a)) | val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a)) | let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1 | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 12,
"end_line": 1263,
"start_col": 0,
"start_line": 1258
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h0 mtr p /\
LowStar.Monotonic.Buffer.loc_disjoint dl (MerkleTree.Low.path_loc p) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
(MerkleTree.Low.path_safe_preserved mtr p dl h0 h1;
let hsz0 = Path?.hash_size (LowStar.Monotonic.Buffer.get h0 p 0) in
let hsz1 = Path?.hash_size (LowStar.Monotonic.Buffer.get h1 p 0) in
let b = MerkleTree.Low.lift_path h0 mtr p in
let a = MerkleTree.Low.lift_path h1 mtr p in
hsz0 = hsz1 /\ FStar.Seq.Base.equal b a)) | FStar.Pervasives.Lemma | [
"lemma"
] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.path_preserved_",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"Prims.unit",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | true | false | true | false | false | let path_preserved mtr p dl h0 h1 =
| assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p)) 0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1 | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.u_max_two | val u_max_two : u2: FStar.Stubs.Reflection.Types.universe -> FStar.Stubs.Reflection.Types.universe | let u_max_two u = (RT.u_max u_two u) | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 36,
"end_line": 26,
"start_col": 0,
"start_line": 26
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | u2: FStar.Stubs.Reflection.Types.universe -> FStar.Stubs.Reflection.Types.universe | Prims.Tot | [
"total"
] | [] | [
"FStar.Stubs.Reflection.Types.universe",
"FStar.Reflection.Typing.u_max",
"Pulse.Reflection.Util.u_two"
] | [] | false | false | false | true | false | let u_max_two u =
| (RT.u_max u_two u) | false |
|
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.pulse_lib_core | val pulse_lib_core : Prims.list Prims.string | let pulse_lib_core = ["Pulse"; "Lib"; "Core"] | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 45,
"end_line": 28,
"start_col": 0,
"start_line": 28
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u) | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"Prims.Cons",
"Prims.string",
"Prims.Nil"
] | [] | false | false | false | true | false | let pulse_lib_core =
| ["Pulse"; "Lib"; "Core"] | false |
|
MerkleTree.Low.fst | MerkleTree.Low.mt_get_path_length | val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True)) | val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True)) | let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 29,
"end_line": 1786,
"start_col": 0,
"start_line": 1784
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
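// For example, with a tree offset of 100UL, split_offset 100UL 103UL = 3ul and
// join_offset 100UL 3ul = 103UL: the 64-bit external offsets and the 32-bit
// internal indices are interconvertible as long as they differ by at most
// offset_range_limit.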
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
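// For example, offset_of 4ul = 4ul and offset_of 5ul = 4ul: the index is
// rounded down to the nearest even number, so `j - offset_of i` below is the
// number of hashes actually stored at a given level.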
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
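// As a small sanity check of the recursion above: with i = 0ul and j = 5ul
// (five leaves), `mt_safe_elts` demands |hs[0]| = 5, |hs[1]| = 2, |hs[2]| = 1
// and |hs[lv]| = 0 for every lv >= 3, following (i / 2ul, j / 2ul) upwards.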
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// and pushing its content onto `hs[lv]`. For the detailed insertion procedure,
// see `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//        BEFORE INSERTION          AFTER INSERTION
//    lv
//    0    h0 h1 h2        ====>    h0 h1 h2 h3
//    1    h01                      h01 h23
//    2                             h03
//
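// Tracing the code below on this example: inserting `h3` when j = 3 first
// pushes h3 onto hs[0]; since j is odd the accumulator becomes h23 = hash h2 h3
// and we recurse at level 1 with j = 1; there h23 is pushed, the accumulator
// becomes h03 = hash h01 h23, and the final call (j = 0, even) pushes h03 onto
// hs[2] and stops.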
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content of `v`, since it uses `v` as an accumulator during
// insertion.
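// A hypothetical usage sketch (the region `leaf_rid` is illustrative and not
// part of this module): allocate a leaf hash in a region disjoint from the
// tree, fill it with the leaf's value, and insert it, keeping in mind that the
// buffer is clobbered:
//
//   let v = rg_alloc (hreg hsz) leaf_rid in
//   (* ... write the leaf hash into v ... *)
//   mt_insert hsz mt v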
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
          false // `rhs` is always invalidated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
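// A small worked example with three leaves (i = 0, j = 3): at level 0, j is
// odd and `actd` is false, so the accumulator is set to h2 and we recurse with
// j = 1 and actd = true; at level 1, rhs[1] := h2 and the accumulator becomes
// hash h01 h2, which is returned as the Merkle root of this incomplete tree.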
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// Given a target index `k`, the number of elements in the tree `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
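//
// Informal worked example (an editorial sketch, not from the original source):
// for a tree holding j = 5 elements, the path for leaf k = 0 with actd = false
// picks up one hash per level:
//   level 0: the sibling leaf 1
//   level 1: the internal node over leaves 2..3
//   level 2: the accumulated rightmost hash covering leaf 4
// so `mt_path_length 0ul 0ul 5ul false` should evaluate to 3ul, matching
// `MTH.mt_path_length 0 5 false` in the high-level spec.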
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.const_path_p
-> FStar.HyperStack.ST.ST LowStar.Vector.uint32_t | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast"
] | [] | false | true | false | false | false | let mt_get_path_length mtr p =
| let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd) | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.mk_pulse_lib_core_lid | val mk_pulse_lib_core_lid : s: Prims.string -> Prims.list Prims.string | let mk_pulse_lib_core_lid s = pulse_lib_core@[s] | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 48,
"end_line": 29,
"start_col": 0,
"start_line": 29
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u) | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | s: Prims.string -> Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"Prims.string",
"FStar.List.Tot.Base.op_At",
"Pulse.Reflection.Util.pulse_lib_core",
"Prims.Cons",
"Prims.Nil",
"Prims.list"
] | [] | false | false | false | true | false | let mk_pulse_lib_core_lid s =
| pulse_lib_core @ [s] | false |
|
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.tun | val tun : FStar.Stubs.Reflection.Types.term | let tun = R.pack_ln R.Tv_Unknown | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 32,
"end_line": 31,
"start_col": 0,
"start_line": 31
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s] | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.Stubs.Reflection.Types.term | Prims.Tot | [
"total"
] | [] | [
"FStar.Stubs.Reflection.V2.Builtins.pack_ln",
"FStar.Stubs.Reflection.V2.Data.Tv_Unknown"
] | [] | false | false | false | true | false | let tun =
| R.pack_ln R.Tv_Unknown | false |
|
MerkleTree.Low.fst | MerkleTree.Low.mt_free | val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1)) | val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1)) | let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 11,
"end_line": 392,
"start_col": 0,
"start_line": 386
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: indicates whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
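// A concrete state, as an informal illustration (added editorially, not part
// of the original source): after three insertions into a fresh tree we would
// expect i = 0, j = 3, hs[0] holding the 3 leaf hashes, hs[1] holding 1
// internal hash, all higher levels empty, and rhs_ok = false until the
// rightmost hashes are rebuilt.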
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
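// For example (an informal sketch added editorially): with i = 2 and j = 5 at
// level 0, `offset_of 2 = 2`, so hs[0] must have size 3 (elements 2..4); the
// recursive call then requires, at level 1 with i = 1 and j = 2, that
// `offset_of 1 = 0` and hence hs[1] has size 2.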
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is fine to take all regions
// reachable from a tree pointer as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1)) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mt: MerkleTree.Low.mt_p -> FStar.HyperStack.ST.ST Prims.unit | FStar.HyperStack.ST.ST | [] | [] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.free",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Regional.rg_free",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.Regional.regional",
"MerkleTree.Low.Datastructures.hreg",
"LowStar.RVector.free",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"LowStar.BufferOps.op_Bang_Star"
] | [] | false | true | false | false | false | let mt_free mt =
| let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@@ inline_let ]let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt | false |
Test.NoHeap.fst | Test.NoHeap.test_one_curve25519 | val test_one_curve25519 (v: Test.Vectors.Curve25519.vector)
: Stack unit (fun _ -> True) (fun _ _ _ -> True) | val test_one_curve25519 (v: Test.Vectors.Curve25519.vector)
: Stack unit (fun _ -> True) (fun _ _ _ -> True) | let test_one_curve25519 (v: Test.Vectors.Curve25519.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Curve25519 in
let Vector result result_len public public_len private_ private__len valid = v in
push_frame ();
B.recall result;
B.recall public;
B.recall private_;
let h0 = get () in
let dst = B.alloca 0uy 32ul in
let h1 = get () in
B.recall result;
B.recall public;
B.recall private_;
if public_len = 32ul && private__len = 32ul then
EverCrypt.Curve25519.scalarmult dst private_ public;
B.recall result;
if result_len = 32ul && valid then
TestLib.compare_and_print !$"Curve25519" result dst 32ul;
pop_frame () | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 14,
"end_line": 267,
"start_col": 0,
"start_line": 249
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300"
val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true)
let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end
let test_hash = test_many !$"Hashes" test_one_hash
/// HMAC
/// ----
let keysized (a:H.alg) (l: UInt32.t): Tot (b:bool{b ==> Spec.Agile.HMAC.keysized a (UInt32.v l) }) =
EverCrypt.Hash.uint32_fits_maxLength a l;
assert (v l `Spec.Hash.Definitions.less_than_max_input_length` a);
assert_norm (v 0xfffffffful = pow2 32 - 1);
l <= 0xfffffffful - Hacl.Hash.Definitions.block_len a
val test_one_hmac: hmac_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hmac vec =
let ha, (LB keylen key), (LB datalen data), (LB expectedlen expected) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if expectedlen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected tag\n"
else if not (keysized ha keylen) then
failwith "Keysized predicate not satisfied\n"
else if not (datalen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "Datalen predicate not satisfied\n"
else if EverCrypt.HMAC.is_supported_alg ha then
begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v keylen));
assert (v datalen + Spec.Hash.Definitions.block_length ha < pow2 32);
B.recall key;
B.recall data;
let computed = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HMAC.compute ha computed key keylen data datalen;
let str = string_of_alg ha in
B.recall expected;
TestLib.compare_and_print str expected computed (Hacl.Hash.Definitions.hash_len ha);
pop_frame()
end
let test_hmac = test_many !$"HMAC" test_one_hmac
/// HKDF
/// ----
val test_one_hkdf: hkdf_vector -> Stack unit (fun _ -> True) (fun _ _ _ -> True)
let test_one_hkdf vec =
let ha, (LB ikmlen ikm), (LB saltlen salt),
(LB infolen info), (LB prklen expected_prk), (LB okmlen expected_okm) = vec in
if Spec.Hash.Definitions.is_shake ha then
failwith "unsupported shake algorithm"
else if prklen <> Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong length of expected PRK\n"
else if okmlen > 255ul * Hacl.Hash.Definitions.hash_len ha then
failwith "Wrong output length\n"
else if not (keysized ha saltlen) then
failwith "Saltlen is not keysized\n"
else if not (keysized ha prklen) then
failwith "Prklen is not keysized\n"
else if not (ikmlen <= 0xfffffffful - Hacl.Hash.Definitions.block_len ha) then
failwith "ikmlen is too large\n"
else if not (infolen <= 0xfffffffful -
Hacl.Hash.Definitions.(block_len ha + hash_len ha + 1ul)) then
failwith "infolen is too large\n"
else if EverCrypt.HMAC.is_supported_alg ha then begin
push_frame();
assert (Spec.Agile.HMAC.keysized ha (v saltlen));
assert (v ikmlen + Spec.Hash.Definitions.block_length ha < pow2 32);
assert Spec.Hash.Definitions.(hash_length ha
+ v infolen + 1 + block_length ha < pow2 32);
B.recall salt;
B.recall ikm;
B.recall info;
let str = string_of_alg ha in
let computed_prk = B.alloca 0uy (Hacl.Hash.Definitions.hash_len ha) in
EverCrypt.HKDF.extract ha computed_prk salt saltlen ikm ikmlen;
B.recall expected_prk;
TestLib.compare_and_print str expected_prk computed_prk (Hacl.Hash.Definitions.hash_len ha);
let computed_okm = B.alloca 0uy (okmlen + 1ul) in
let computed_okm = B.sub computed_okm 0ul okmlen in
EverCrypt.HKDF.expand ha computed_okm computed_prk prklen info infolen okmlen;
B.recall expected_okm;
TestLib.compare_and_print str expected_okm computed_okm okmlen;
pop_frame()
end
let test_hkdf = test_many !$"HKDF" test_one_hkdf
/// Chacha20
/// --------
friend Lib.IntTypes
let test_one_chacha20 (v: chacha20_vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let (LB key_len key), (LB iv_len iv), ctr, (LB plain_len plain), (LB cipher_len cipher) = v in
if cipher_len = 0xfffffffful then
failwith "Cipher too long"
else if cipher_len <> plain_len then
failwith "Cipher len and plain len don't match"
else if key_len <> 32ul then
failwith "invalid key len"
else if iv_len <> 12ul then
failwith "invalid iv len"
else if not (ctr <= 0xfffffffful - cipher_len / 64ul) then
failwith "invalid len"
else begin
push_frame ();
B.recall key;
B.recall iv;
B.recall plain;
B.recall cipher;
let cipher' = B.alloca 0uy (cipher_len + 1ul) in
let cipher' = B.sub cipher' 0ul cipher_len in
EverCrypt.Cipher.chacha20 plain_len cipher' plain key iv ctr;
TestLib.compare_and_print !$"of ChaCha20 message" cipher cipher' cipher_len;
pop_frame ()
end
let test_chacha20 = test_many !$"CHACHA20" test_one_chacha20
/// Using generated vectors in the vectors/ directory
/// =================================================
/// Poly1305
/// --------
let test_one_poly1305 (v: Test.Vectors.Poly1305.vector): Stack unit (fun _ -> True) (fun _ _ _ -> True) =
let open Test.Vectors.Poly1305 in
let Vector tag tag_len key key_len input input_len = v in
push_frame ();
if not (4294967295ul `U32.sub` 16ul `U32.gte` input_len)
then
failwith "Error: skipping a test_poly1305 instance because bounds do not hold\n"
else begin
B.recall key;
B.recall tag;
B.recall input;
let h0 = get () in
let dst = B.alloca 0uy 16ul in
let h1 = get () in
B.recall input;
B.recall key;
B.recall tag;
if key_len = 32ul then
EverCrypt.Poly1305.mac dst input input_len key;
B.recall tag;
if tag_len = 16ul then
TestLib.compare_and_print !$"Poly1305" tag dst 16ul
end;
pop_frame ()
let test_poly1305 () : Stack unit (fun _ -> True) (fun _ _ _ -> True) =
test_many !$"poly1305" test_one_poly1305 Test.Vectors.Poly1305.(LB vectors_len vectors)
/// Curve25519
/// ---------- | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | v: Test.Vectors.Curve25519.vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.Vectors.Curve25519.vector",
"LowStar.Buffer.buffer",
"FStar.UInt8.t",
"LowStar.Monotonic.Buffer.recallable",
"LowStar.Buffer.trivial_preorder",
"FStar.UInt32.t",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.v",
"Prims.bool",
"FStar.HyperStack.ST.pop_frame",
"Prims.unit",
"Prims.op_AmpAmp",
"FStar.UInt32.__uint_to_t",
"TestLib.compare_and_print",
"Test.NoHeap.op_Bang_Dollar",
"LowStar.Monotonic.Buffer.recall",
"EverCrypt.Curve25519.scalarmult",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.mbuffer",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"FStar.UInt32.uint_to_t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Buffer.alloca",
"FStar.UInt8.__uint_to_t",
"FStar.HyperStack.ST.push_frame",
"Prims.l_True"
] | [] | false | true | false | false | false | let test_one_curve25519 (v: Test.Vectors.Curve25519.vector)
: Stack unit (fun _ -> True) (fun _ _ _ -> True) =
| let open Test.Vectors.Curve25519 in
let Vector result result_len public public_len private_ private__len valid = v in
push_frame ();
B.recall result;
B.recall public;
B.recall private_;
let h0 = get () in
let dst = B.alloca 0uy 32ul in
let h1 = get () in
B.recall result;
B.recall public;
B.recall private_;
if public_len = 32ul && private__len = 32ul then EverCrypt.Curve25519.scalarmult dst private_ public;
B.recall result;
if result_len = 32ul && valid then TestLib.compare_and_print !$"Curve25519" result dst 32ul;
pop_frame () | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.bool_lid | val bool_lid : Prims.list Prims.string | let bool_lid = R.bool_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 25,
"end_line": 33,
"start_col": 0,
"start_line": 33
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"FStar.Reflection.Const.bool_lid"
] | [] | false | false | false | true | false | let bool_lid =
| R.bool_lid | false |
|
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.unit_lid | val unit_lid : Prims.list Prims.string | let unit_lid = R.unit_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 25,
"end_line": 32,
"start_col": 0,
"start_line": 32
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s] | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"FStar.Reflection.Const.unit_lid"
] | [] | false | false | false | true | false | let unit_lid =
| R.unit_lid | false |
|
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.int_lid | val int_lid : Prims.list Prims.string | let int_lid = R.int_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 24,
"end_line": 34,
"start_col": 0,
"start_line": 34
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"FStar.Reflection.Const.int_lid"
] | [] | false | false | false | true | false | let int_lid =
| R.int_lid | false |
|
Test.NoHeap.fst | Test.NoHeap.test_one_hash | val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true) | val test_one_hash: hash_vector -> Stack unit (fun _ -> true) (fun _ _ _ -> true) | let test_one_hash vec =
let a, input, (LB expected_len expected), repeat = vec in
if Spec.Hash.Definitions.is_shake a then
failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen then
failwith "Wrong length of expected tag\n"
else if repeat = 0ul then
failwith "Repeat must be non-zero\n"
else if not (input_len <= (0xfffffffful - 1ul) / repeat) then
failwith "Repeated input is too large\n"
else begin
push_frame();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <= v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len
);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert (v total_input_len `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame()
end | {
"file_name": "providers/test/Test.NoHeap.fst",
"git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872",
"git_url": "https://github.com/project-everest/hacl-star.git",
"project_name": "hacl-star"
} | {
"end_col": 7,
"end_line": 90,
"start_col": 0,
"start_line": 52
} | module Test.NoHeap
module B = LowStar.Buffer
module L = Test.Lowstarize
module U32 = FStar.UInt32
open FStar.HyperStack.ST
open FStar.Integers
open LowStar.BufferOps
open Test.Lowstarize
let string_of_alg: Spec.Agile.Hash.hash_alg -> C.String.t =
let open C.String in
let open Spec.Agile.Hash in
function
| MD5 -> !$"MD5"
| SHA1 -> !$"SHA1"
| SHA2_224 -> !$"SHA2_224"
| SHA2_256 -> !$"SHA2_256"
| SHA2_384 -> !$"SHA2_384"
| SHA2_512 -> !$"SHA2_512"
| SHA3_224 -> !$"SHA3_224"
| SHA3_256 -> !$"SHA3_256"
| SHA3_384 -> !$"SHA3_384"
| SHA3_512 -> !$"SHA3_512"
| Blake2S -> !$"Blake2S"
| Blake2B -> !$"Blake2B"
| Shake128 -> !$"Shake128"
| Shake256 -> !$"Shake256"
/// A module that contains stack-only tests, suitable for both C and Wasm. Other
/// tests that may make arbitrary use of the heap are in Test and Test.Hash.
///
/// .. note::
/// Tests in this module are *VERIFIED*. Please keep it this way.
noextract unfold inline_for_extraction
let (!$) = C.String.((!$))
noextract unfold inline_for_extraction
let failwith = LowStar.Failure.failwith
/// Using meta-evaluated Low* test vectors from Test.Vectors
/// ========================================================
///
/// Hashes
/// ------
#set-options "--max_fuel 0 --max_ifuel 0 --z3rlimit 300" | {
"checked_file": "/",
"dependencies": [
"TestLib.fsti.checked",
"Test.Vectors.Poly1305.fst.checked",
"Test.Vectors.Curve25519.fst.checked",
"Test.Vectors.Chacha20Poly1305.fst.checked",
"Test.Vectors.fst.checked",
"Test.Lowstarize.fst.checked",
"Spec.Hash.Definitions.fst.checked",
"Spec.Agile.HMAC.fsti.checked",
"Spec.Agile.Hash.fsti.checked",
"prims.fst.checked",
"LowStar.Failure.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fst.checked",
"Hacl.Hash.Definitions.fst.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Integers.fst.checked",
"FStar.Int32.fsti.checked",
"FStar.HyperStack.ST.fsti.checked",
"EverCrypt.Poly1305.fsti.checked",
"EverCrypt.HMAC.fsti.checked",
"EverCrypt.HKDF.fsti.checked",
"EverCrypt.Hash.Incremental.fst.checked",
"EverCrypt.Hash.fsti.checked",
"EverCrypt.Curve25519.fsti.checked",
"EverCrypt.Cipher.fsti.checked",
"EverCrypt.Chacha20Poly1305.fsti.checked",
"C.String.fsti.checked",
"C.Loops.fst.checked"
],
"interface_file": true,
"source_file": "Test.NoHeap.fst"
} | [
{
"abbrev": false,
"full_module": "Test.Lowstarize",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": true,
"full_module": "Test.Lowstarize",
"short_module": "L"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "EverCrypt.Hash",
"short_module": "H"
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "Test",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | vec: Test.NoHeap.hash_vector -> FStar.HyperStack.ST.Stack Prims.unit | FStar.HyperStack.ST.Stack | [] | [] | [
"Test.NoHeap.hash_vector",
"Spec.Hash.Definitions.hash_alg",
"C.String.t",
"FStar.UInt32.t",
"LowStar.Buffer.buffer",
"FStar.UInt8.t",
"Prims.l_and",
"Prims.eq2",
"LowStar.Monotonic.Buffer.len",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.recallable",
"Spec.Hash.Definitions.is_shake",
"Test.NoHeap.failwith",
"Prims.unit",
"Prims.bool",
"Prims.op_disEquality",
"Prims.op_Equality",
"FStar.UInt32.__uint_to_t",
"Prims.op_Negation",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"FStar.Integers.op_Slash",
"FStar.Integers.op_Subtraction",
"FStar.HyperStack.ST.pop_frame",
"TestLib.compare_and_print",
"Test.NoHeap.string_of_alg",
"LowStar.Monotonic.Buffer.recall",
"EverCrypt.Hash.Incremental.hash",
"Prims._assert",
"Prims.b2t",
"Spec.Hash.Definitions.less_than_max_input_length",
"FStar.Integers.v",
"EverCrypt.Hash.uint32_fits_maxLength",
"C.Loops.for",
"FStar.Monotonic.HyperStack.mem",
"Prims.nat",
"LowStar.Monotonic.Buffer.live",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_buffer",
"Prims.op_LessThanOrEqual",
"FStar.UInt32.v",
"Prims.op_LessThan",
"C.String.memcpy",
"LowStar.Buffer.sub",
"FStar.Integers.op_Star",
"FStar.Ghost.hide",
"LowStar.Monotonic.Buffer.mbuffer",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Integers.op_Plus",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.add",
"FStar.UInt32.uint_to_t",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Buffer.alloca",
"FStar.UInt8.__uint_to_t",
"FStar.Integers.int_t",
"FStar.Integers.op_Less",
"Prims.pow2",
"FStar.Pervasives.assert_norm",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"Lib.IntTypes.uint8",
"FStar.HyperStack.ST.push_frame",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Prims.int",
"Prims.l_or",
"Lib.IntTypes.range",
"Prims.op_GreaterThanOrEqual",
"Prims.op_Subtraction",
"Prims.op_GreaterThan",
"Lib.IntTypes.v",
"Spec.Hash.Definitions.hash_length",
"Hacl.Hash.Definitions.hash_len",
"C.String.length",
"C.String.strlen"
] | [] | false | true | false | false | false | let test_one_hash vec =
| let a, input, LB expected_len expected, repeat = vec in
if Spec.Hash.Definitions.is_shake a
then failwith "unsupported shake algorithm"
else
let input_len = C.String.strlen input in
let tlen = Hacl.Hash.Definitions.hash_len a in
if expected_len <> tlen
then failwith "Wrong length of expected tag\n"
else
if repeat = 0ul
then failwith "Repeat must be non-zero\n"
else
if not (input_len <= (0xfffffffful - 1ul) / repeat)
then failwith "Repeated input is too large\n"
else
(push_frame ();
let computed = B.alloca 0uy tlen in
assert_norm (v 0xfffffffful = pow2 32 - 1);
assert (v input_len * v repeat + 1 < pow2 32);
let total_input_len = input_len * repeat in
let total_input = B.alloca 0uy (total_input_len + 1ul) in
let total_input = B.sub total_input 0ul total_input_len in
let h0 = get () in
C.Loops.for 0ul
repeat
(fun h i -> B.live h total_input /\ B.modifies (B.loc_buffer total_input) h0 h)
(fun i ->
assert (v input_len * v i + v input_len <=
v input_len * (v repeat - 1) + v input_len);
assert (v input_len * v i + v input_len <= v input_len * v repeat);
C.String.memcpy (B.sub total_input (input_len * i) input_len) input input_len);
EverCrypt.Hash.uint32_fits_maxLength a total_input_len;
assert ((v total_input_len) `Spec.Hash.Definitions.less_than_max_input_length` a);
EverCrypt.Hash.Incremental.hash a computed total_input total_input_len;
B.recall expected;
let str = string_of_alg a in
TestLib.compare_and_print str expected computed tlen;
pop_frame ()) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_get_path_step_pre | val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True)) | val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True)) | let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 41,
"end_line": 1877,
"start_col": 0,
"start_line": 1875
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the index types below to 64-bit ones.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
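// Illustrative note (added, not from the original source): `split_offset` and
// `join_offset` are inverses whenever `offsets_connect tree index` holds, i.e.
// `join_offset tree (split_offset tree index) == index`, since the 64-bit
// offset differs from the index by at most `offset_range_limit`.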
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: a flag indicating whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate those Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
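// Illustrative shape (added; not part of the original comment): after five
// leaves h0..h4 have been inserted, `hs[0]` is [h0; h1; h2; h3; h4], `hs[1]` is
// [h01; h23] and `hs[2]` is [h03] (using the `hij` notation introduced further
// below for the compressed hash of elements i..j), while every higher level is
// empty. `rhs` and `mroot` only hold meaningful values once `rhs_ok` is set.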
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
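// Illustrative values (added for clarity, not part of the original source):
// `offset_of 6ul = 6ul` and `offset_of 7ul = 6ul`; an odd index is rounded down
// to the preceding even one, so `j - offset_of i` counts the hashes that are
// still physically stored at a level.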
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
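// A worked instance of the invariant above (added for illustration): for
// i = 0ul and j = 5ul, unfolding the recursion requires
// V.size_of hs[0] == 5ul, V.size_of hs[1] == 2ul, V.size_of hs[2] == 1ul and
// V.size_of hs[lv] == 0ul for lv >= 3, since (i, j) evolves as
// (0, 5) -> (0, 2) -> (0, 1) -> (0, 0) with each halving.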
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
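// A step-by-step reading of the diagram above (added for illustration):
// inserting h3 with j = 3ul first snocs h3 onto hs[0]; since j is odd, the
// accumulator becomes hash(h2, h3) = h23 and we recurse at level 1 with
// j = 1ul, snoc h23 onto hs[1], fold it with h01 into h03, and recurse at
// level 2 with j = 0ul, where h03 is snocced and the even branch stops the
// recursion.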
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always invalidated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
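//
// Roughly, `construct_rhs` walks the levels upward starting from `lv`: when
// the element count `j` at a level is even it simply recurses on `j / 2`;
// when `j` is odd, the current accumulator is first recorded in `rhs[lv]`
// (only if `actd` is set) and then replaced by the hash of the unpaired last
// element at that level with the old accumulator (or, if `actd` is not set,
// by the unpaired last element itself), before recursing on `j / 2` with
// `actd` forced to true.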
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it has already been calculated and
// the rightmost hashes are up-to-date, the cached root is returned immediately.
// Otherwise it calls `construct_rhs` to build the rightmost hashes and to
// calculate the Merkle root as well.
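//
// Concretely, when `rhs_ok` is set the cached `mroot` is copied into `rt`;
// otherwise `construct_rhs` is run with `rt` as the accumulator, the resulting
// root is copied back into `mroot`, and `rhs_ok` is set to true so that later
// calls can reuse the cached root.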
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating whether rightmost hashes exist), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
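//
// For example, with `k = 2ul`, `j = 5ul` and `actd = false`:
// - level 0: k is even and k < j, so the sibling leaf hash (leaf 3) is counted;
// - level 1: k/2 = 1 is odd, so the hash of leaves 0-1 is counted;
// - level 2: k/4 = 0 is even and j/4 = 1 equals k+1, but `actd` has become
//   true by now, so one rightmost hash (covering leaf 4) is counted;
// giving `mt_path_length 0ul 2ul 5ul false = 3ul`.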
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.const_path_p -> i: LowStar.Vector.uint32_t
-> FStar.HyperStack.ST.ST Prims.bool | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.mt_get_path_step_pre_nst",
"Prims.bool",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | false | true | false | false | false | let mt_get_path_step_pre #hsz mtr p i =
| let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.erased_lid | val erased_lid : Prims.list Prims.string | let erased_lid = ["FStar"; "Ghost"; "erased"] | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 45,
"end_line": 35,
"start_col": 0,
"start_line": 35
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"Prims.Cons",
"Prims.string",
"Prims.Nil"
] | [] | false | false | false | true | false | let erased_lid =
| ["FStar"; "Ghost"; "erased"] | false |
|
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.hide_lid | val hide_lid : Prims.list Prims.string | let hide_lid = ["FStar"; "Ghost"; "hide"] | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 41,
"end_line": 36,
"start_col": 0,
"start_line": 36
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"Prims.Cons",
"Prims.string",
"Prims.Nil"
] | [] | false | false | false | true | false | let hide_lid =
| ["FStar"; "Ghost"; "hide"] | false |
|
MerkleTree.Low.fst | MerkleTree.Low.mt_get_path_step | val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True )) | val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True )) | let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 59,
"end_line": 1894,
"start_col": 0,
"start_line": 1892
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of the lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything wider.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
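// Offsets are 64-bit while in-tree indices are 32-bit: `split_offset` turns an
// absolute offset into an index relative to the tree's base offset, and
// `join_offset` maps such an index back to an absolute offset.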
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: indicates whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate Merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
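//
// For example, with `i = 0` and `j = 3` (see the `insert_` diagram below),
// `hs[0]` holds the three leaf hashes h0 h1 h2 and `hs[1]` holds h01; the
// unpaired h2 is only combined via `rhs` once a root or a path is requested.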
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
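// `offset_of i` rounds `i` down to the nearest even index: `hs[lv]` only keeps
// hashes from `offset_of i` onward, which is why `mt_safe_elts` below demands
// `V.size_of (V.get h hs lv) == j - offset_of i`.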
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
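//
// For example, with `i = 0ul` and `j = 5ul` at `lv = 0ul` this forces
// |hs[0]| = 5, |hs[1]| = 2, |hs[2]| = 1 and |hs[lv]| = 0 for lv >= 3,
// i.e. the shape produced by five insertions starting from an empty tree.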
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv` by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure,
// see `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` changes `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
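// An illustrative trace of the example above (an informal sketch, not part of
// the verified code; the `hash_fun` argument is omitted): inserting the
// accumulator `h3` when `j = 3`:
//
//   insert_ 0ul 0ul 3ul hs h3    -- copy: hs[0] := [h0; h1; h2; h3]
//                                -- j odd: acc := hash h2 h3 (call it h23)
//   insert_ 1ul 0ul 1ul hs h23   -- copy: hs[1] := [h01; h23]
//                                -- j odd: acc := hash h01 h23 (call it h03)
//   insert_ 2ul 0ul 0ul hs h03   -- copy: hs[2] := [h03]; j even, recursion stops
//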
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// modifies the contents of `v`, since it uses `v` as an accumulator during
// insertion.
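//
// A minimal usage sketch (illustrative only; the setup around `leaf` is
// hypothetical). Because `v` doubles as the accumulator, a caller that still
// needs the leaf value after insertion should keep its own copy:
//
//   // ... fill `leaf` with the data to insert ...
//   // mt_insert hsz mt leaf   (* `leaf` is overwritten during insertion *)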
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
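//
// A minimal creation sketch (illustrative only; the region `r`, the hash
// `init`, and the hash function `hfun` are hypothetical placeholders that must
// satisfy the preconditions below):
//
//   // let mt = mt_create_custom hsz hspec r init hfun in
//   // (* afterwards MT?.i = 0ul, MT?.j = 1ul, and level 0 holds `init` *)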
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
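//
// A typical path lifecycle, as an informal sketch (the identifiers `mtr`, `r`,
// and `p` are hypothetical placeholders for a tree region, a path region, and
// a path pointer):
//
//   // let p = init_path hsz mtr r in   (* an empty path                     *)
//   // ... mt_get_path ... p ...        (* fills `p` with hashes of the tree *)
//   // clear_path mtr p;                (* back to the empty path            *)
//   // free_path p                      (* releases the hash vector and `p`  *)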
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// `construct_rhs` constructs the "rightmost hashes" for a given (incomplete)
// Merkle tree. It also calculates the Merkle root, which is the final
// accumulator value.
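//
// An illustrative run (informal, not part of the verified code): for a tree
// with three leaves, i.e. hs[0] = [h0; h1; h2] and hs[1] = [h01], starting
// from lv = 0ul, i = 0ul, j = 3ul, actd = false:
//
//   lv 0: j = 3 (odd), actd = false ==> acc := h2 (rhs[0] is left untouched)
//   lv 1: j = 1 (odd), actd = true  ==> rhs[1] := h2; acc := hash h01 h2
//   lv 2: j = 0                     ==> done; the Merkle root is hash h01 h2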
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it has already been calculated from
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build the rightmost hashes and to calculate the Merkle root.
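//
// A minimal usage sketch (illustrative; `cmt` is a hypothetical const pointer
// to the tree and `rt` a hash buffer in a disjoint region):
//
//   // mt_get_root cmt rt;   (* builds rhs, stores the root in rt, sets rhs_ok *)
//   // mt_get_root cmt rt    (* rhs_ok now holds: just copies the cached root  *)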
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating whether rightmost hashes exist), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
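//
// A worked example (informal): for a tree with j = 5 leaves and target index
// k = 2, with no up-to-date rightmost hashes (actd = false):
//
//   lv 0: k = 2, j = 5, actd = false ==> step 1 (sibling h3)
//   lv 1: k = 1, j = 2, actd = true  ==> step 1 (sibling h01)
//   lv 2: k = 0, j = 1, actd = true  ==> step 1 (the rightmost hash covering h4)
//   lv 3: j = 0                      ==> step 0
//
// so mt_path_length 0ul 2ul 5ul false = 3ul.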
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv)))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.const_path_p -> i: LowStar.Vector.uint32_t
-> FStar.HyperStack.ST.ST MerkleTree.Low.Datastructures.hash | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.uint32_t",
"LowStar.Vector.index",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast",
"FStar.Ghost.reveal"
] | [] | false | true | false | false | false | let mt_get_path_step #hsz mtr p i =
| let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_path_length_step | val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd}) | val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd}) | let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 16,
"end_line": 1758,
"start_col": 0,
"start_line": 1754
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of the lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: indicates whether the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate Merkle paths that need the rightmost hashes as part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
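// For example (informal illustration): with offset = 0UL, i = 0ul, j = 3ul,
// `hs[0]` holds [h0; h1; h2], `hs[1]` holds [h01], and all higher levels are
// empty; `rhs` and `mroot` only carry meaningful values once `rhs_ok` is true.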
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access the elements from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
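// For example (informal): with lv = 0ul, i = 0ul, j = 3ul, the predicate
// requires hs[0] to have 3 elements, hs[1] to have 1 element, and hs[2] and
// above to be empty.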
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree throughout its lifetime.
// It includes liveness, regionality, disjointness (between its data structures),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is fine to take all regions
// reachable from the tree pointer as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying its
// content and pushing the copy onto `hs[lv]`. For the detailed insertion
// procedure, see `insert_` and `mt_insert`.
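// As an added illustration (not part of the original comment): if `hs[lv]`
// currently holds [h0; h1] and `v` is a fresh leaf hash, then after the call
// `hs[lv]` holds [h0; h1; v'], where `v'` is a copy of `v` (see the `S.snoc`
// clause in the postcondition); `v` itself stays available to the caller,
// which `insert_` exploits by reusing it as the accumulator.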
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
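// Worked instances of the two index lemmas above (illustration only):
// - even case, j = 4ul: j / 2ul = 2ul = (j + 1ul) / 2ul, so the element
//   counts at the higher levels are unaffected by the insertion;
// - odd case, j = 5ul: (j + 1ul) / 2ul = 3ul = j / 2ul + 1ul, so the next
//   level receives exactly one more element.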
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
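// Continuing the example (an added illustration): inserting a fifth leaf `h4`
// hits the even case at level 0 (j = 4), so only `hs[0]` changes:
//
// 0 h0 h1 h2 h3 h4
// 1 h01 h23
// 2 h03
//
// No compression happens in this step; the next insertion (j = 5, odd) will
// again propagate compressed hashes upwards.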
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// manipulates the content of `v`, since it uses `v` as an accumulator during
// insertion.
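// A typical call sequence is the one in `mt_create_custom` below: allocate a
// hash in a region disjoint from the tree, fill it with the leaf value, and
// pass it to `mt_insert`. After the call, `v` may have been overwritten with
// intermediate accumulator values, so callers should not assume it still
// holds the original leaf hash.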
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
           false // `rhs` is always stale right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (thus a different region
// id), we cannot use the regionality property for `path`s. Hence we manually
// define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
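// For intuition (an added illustration): with i = 0ul and j = 5ul the
// recursion visits levels 0, 1, 2, 3 with j = 5, 2, 1, 0 respectively; the
// interesting work (saving `acc` into `rhs[lv]` when `actd` is set, and
// folding the last hash of the level into `acc`) happens only at the levels
// where j is odd, and the final accumulator value is the Merkle root.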
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it has already been computed and
// the rightmost hashes are up to date, the root is returned immediately.
// Otherwise it calls `construct_rhs` to build the rightmost hashes and to
// calculate the Merkle root as well.
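// Note on the flag protocol (added for clarity): the slow path below ends by
// writing the tree back with `rhs_ok = true`, while `mt_insert` writes it
// back with `rhs_ok = false`, so repeated root queries without intervening
// insertions take the fast path that simply copies `mroot` into `rt`.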
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For a detailed description, see `mt_get_path` and `mt_verify`.
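// A rough reading of `mt_path_length_step` (added illustration): a level
// where `k` is odd always contributes 1, since the left sibling `k - 1` is
// guaranteed to be in the tree; a level where `k` is even contributes 0 only
// when no right sibling is available (j = k, or j = k + 1ul without a
// rightmost hash, i.e. `actd` is false), and contributes 1 otherwise.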
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool -> | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | k: MerkleTree.Low.index_t -> j: MerkleTree.Low.index_t{k <= j} -> actd: Prims.bool
-> sl:
LowStar.Vector.uint32_t
{ FStar.UInt32.v sl =
MerkleTree.New.High.mt_path_length_step (FStar.UInt32.v k) (FStar.UInt32.v j) actd } | Prims.Tot | [
"total"
] | [] | [
"MerkleTree.Low.index_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"Prims.bool",
"Prims.op_Equality",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Percent",
"Prims.op_BarBar",
"Prims.op_AmpAmp",
"FStar.Integers.op_Plus",
"Prims.op_Negation",
"LowStar.Vector.uint32_t",
"Prims.int",
"Prims.l_or",
"FStar.UInt.size",
"FStar.UInt32.n",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt32.v",
"MerkleTree.New.High.mt_path_length_step"
] | [] | false | false | false | false | false | let mt_path_length_step k j actd =
| if j = 0ul
then 0ul
else (if k % 2ul = 0ul then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul) else 1ul) | false |
MerkleTree.Low.fst | MerkleTree.Low.lift_path_index_ | val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))] | val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))] | let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 45,
"end_line": 1120,
"start_col": 0,
"start_line": 1117
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
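// For example, with tree offset 1000UL, the leaf at global offset 1005UL has
// in-tree index `split_offset 1000UL 1005UL = 5ul`, and conversely
// `join_offset 1000UL 5ul = 1005UL`; `offsets_connect` and `add64_fits` are
// exactly the bounds checks that make this round trip well-defined.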
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
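// For example, `offset_of 4ul = 4ul` and `offset_of 5ul = 4ul`: an odd index is
// rounded down to the even index that starts its node pair, so `j - offset_of i`
// below is the number of valid slots at a level when the live range starts at `i`.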
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
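// For example, with i = 0ul and j = 3ul (the three-leaf tree pictured before
// `insert_` below), the predicate requires |hs[0]| = 3, |hs[1]| = 1 and
// |hs[lv]| = 0 for every lv >= 2, since the recursion halves j at each level:
// 3, 1, 0, 0, ...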
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying
// and pushing its content to `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
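// Concretely: for an odd j such as 3ul, (j + 1ul) / 2ul = 2ul = j / 2ul + 1ul,
// so inserting the (j+1)-th element also grows the next level by one hash
// (the carry case handled in `insert_`); for an even j such as 4ul,
// (j + 1ul) / 2ul = 2ul = j / 2ul, so the insertion stops at the current level.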
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
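// Concretely, inserting `h3` above runs with `acc = h3`: level 0 pushes `h3`,
// and since j = 3 is odd the accumulator becomes `hash h2 h3 = h23` and the
// function recurses at level 1, which pushes `h23`, folds in `h01` to obtain
// `h03`, and recurses once more to push `h03` at level 2, where j = 0 is even
// and the recursion stops.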
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
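// For example, `lift_path_ h hs i (i + 2)` is the two-element high-level path
// whose entries are `Rgl?.r_repr (hreg hsz) h` applied to `hs[i]` and `hs[i+1]`,
// in that order: the low-level hash pointers in `hs[i..j-1]` are replaced by
// their heap contents, which is exactly what `lift_path_index_` below states
// index by index.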
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))] | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
h: FStar.Monotonic.HyperStack.mem ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat{i <= j && j <= FStar.Seq.Base.length hs} ->
k: FStar.Integers.nat{i <= k && k < j}
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_seq hs
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp))
(ensures
Rgl?.r_repr (MerkleTree.Low.Datastructures.hreg hsz) h (FStar.Seq.Base.index hs k) ==
FStar.Seq.Base.index (MerkleTree.Low.lift_path_ h hs i j) (k - i))
(decreases j)
[SMTPat (FStar.Seq.Base.index (MerkleTree.Low.lift_path_ h hs i j) (k - i))] | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"FStar.Integers.op_Less",
"Prims.op_Equality",
"Prims.bool",
"FStar.Integers.int_t",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.lift_path_index_",
"Prims.unit"
] | [
"recursion"
] | false | false | true | false | false | let rec lift_path_index_ #hsz h hs i j k =
| if i = j then () else if k = j - 1 then () else lift_path_index_ #hsz h hs i (j - 1) k | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.reveal_lid | val reveal_lid : Prims.list Prims.string | let reveal_lid = ["FStar"; "Ghost"; "reveal"] | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 45,
"end_line": 37,
"start_col": 0,
"start_line": 37
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"] | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"Prims.Cons",
"Prims.string",
"Prims.Nil"
] | [] | false | false | false | true | false | let reveal_lid =
| ["FStar"; "Ghost"; "reveal"] | false |
|
MerkleTree.Low.fst | MerkleTree.Low.path_preserved_ | val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j) | val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j) | let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1) | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 59,
"end_line": 1242,
"start_col": 0,
"start_line": 1234
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
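// `mt_safe_elts_spec` transports the low-level invariant to the high-level
// well-formedness predicate `MTH.hs_wf_elts`; several correctness
// postconditions below invoke it before stating their `S.equal` claims.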
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
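// `mt_lift` (together with `merkle_tree_lift`) gives the ghost, high-level
// view of a low-level tree as an `MTH.merkle_tree`; the functional-correctness
// postconditions of the stateful operations below are stated against this view.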
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
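// Note that `create_empty_mt` allocates three fresh sub-regions of `r` (for
// `hs`, `rhs`, and the Merkle root), so the pairwise region disjointness
// required by `mt_safe` holds by construction.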
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv` by copying it
// and pushing the copy onto `hs[lv]`. For the detailed insertion procedure,
// see `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts the proper hashes at each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` changes `hs` as follows
// (`hij` denotes the compressed hash covering `hi` through `hj`):
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
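// For instance, with the picture above (i = 0, j = 3, inserting h3):
// level 0 receives h3 and, since j = 3 is odd, the accumulator becomes
// hash(h2, h3) = h23 before recursing at level 1 with j / 2 = 1;
// level 1 receives h23 and, since 1 is odd again, the accumulator becomes
// hash(h01, h23) = h03 before recursing at level 2 with j = 0;
// level 2 receives h03 and stops, since 0 is even.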
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
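// `mt_insert_pre_nst` holds iff the tree still has room for one more element
// (`MT?.j mtv < uint32_32_max`) and the 64-bit `offset` can be extended by
// the new index `j + 1` without overflowing.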
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// mutates the contents of `v`, since it uses `v` as an accumulator during
// the insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
           false // `rhs` always becomes stale right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree must contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
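// An illustrative usage sketch (comment only; the preconditions of each
// operation, e.g. `mt_insert_pre_nst` and the region-disjointness
// requirements, still have to be established by the caller):
//
//   let mt = mt_create_custom hsz hash_spec r init hash_fun in
//   mt_insert hsz mt v;
//   ...
//   mt_free mt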
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
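// Note that `path_loc` only covers the regions beneath the path pointer's
// frame; the hash buffers a path points to live inside the tree's region
// `mtr`, which `path_safe` requires to be disjoint from `B.frameOf p`, so
// they are not part of `path_loc`.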
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
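// `lift_path_` lifts the slice `hs[i..j-1]` to a high-level path while
// preserving the order: the k-th element of the result is the representation
// of `hs[i + k]` (this is exactly what `lift_path_index_` below states).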
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mtr: FStar.Monotonic.HyperHeap.rid ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat{i <= j && j <= FStar.Seq.Base.length hs} ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_seq hs
i
j
(fun hp ->
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h0 hp /\
FStar.Monotonic.HyperHeap.includes mtr
(Rgl?.region_of (MerkleTree.Low.Datastructures.hreg hsz) hp)) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
(MerkleTree.Low.path_safe_preserved_ mtr hs i j dl h0 h1;
FStar.Seq.Base.equal (MerkleTree.Low.lift_path_ h0 hs i j)
(MerkleTree.Low.lift_path_ h1 hs i j)))
(decreases j) | FStar.Pervasives.Lemma | [
"lemma",
""
] | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.op_Equality",
"Prims.bool",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.index",
"FStar.Integers.op_Subtraction",
"Prims.unit",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Regional.__proj__Rgl__item__region_of",
"MerkleTree.Low.path_preserved_",
"MerkleTree.Low.path_safe_preserved_"
] | [
"recursion"
] | false | false | true | false | false | let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
| if i = j
then ()
else
(path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes (B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1) | false |
MerkleTree.Low.fst | MerkleTree.Low.mt_get_path_pre | val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True)) | val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True)) | let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "7d7bdc20f2033171e279c176b26e84f9069d23c6",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | {
"end_col": 38,
"end_line": 2016,
"start_col": 0,
"start_line": 2012
} | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, we currently
// cannot change the types below to anything else.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
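// Externally, leaves are addressed by 64-bit offsets; internally the tree
// keeps a 64-bit base `offset` plus 32-bit indices. `split_offset` recovers
// the 32-bit index of an external offset relative to the base, and
// `join_offset` maps an internal index back to an external offset.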
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: indicates whether the rightmost hashes are up-to-date.
// `rhs`: a store for the "rightmost" hashes, manipulated only when required
//        to calculate Merkle paths that need the rightmost hashes as a part
//        of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
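// `offset_of i` rounds `i` down to the nearest even index; at each level the
// tree only keeps the hashes from `offset_of i` up to `j - 1` (cf. the size
// condition in `mt_safe_elts` below).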
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree throughout its lifetime.
// It includes liveness, regionality, disjointness (between the constituent
// data structures), and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it is sound to take the tree
// pointer's frame together with all regions beneath it as the location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at level `lv`, by copying it
// and pushing it onto `hs[lv]`. For the detailed insertion procedure, see
// `insert_` and `mt_insert`.
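// (Illustrative note, not part of the original development.) For instance, if
// `hs[lv]` currently holds [h0; h1] with `i = 0ul` and `j = 2ul`, then after
// `hash_vv_insert_copy lv i j hs v` that level holds [h0; h1; v]: its size
// grows from `j - offset_of i` to `j + 1ul - offset_of i`, and all other
// levels are left untouched.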
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes into each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follows:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//       BEFORE INSERTION            AFTER INSERTION
// lv
// 0     h0 h1 h2            ====>   h0 h1 h2 h3
// 1     h01                         h01 h23
// 2                                 h03
//
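// (Illustrative trace, not part of the original development.) For the picture
// above, inserting `h3` amounts to `insert_ 0ul 0ul 3ul hs acc` with `acc`
// holding `h3`; hash applications are written informally:
// lv 0: hs[0] := [h0; h1; h2; h3]; j = 3 is odd, so acc := hash h2 h3 (= h23); recurse with j := 1
// lv 1: hs[1] := [h01; h23]; j = 1 is odd, so acc := hash h01 h23 (= h03); recurse with j := 0
// lv 2: hs[2] := [h03] (the copy still happens); j = 0 is even, so the recursion stops.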
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash into a Merkle tree. Note that this operation
// mutates the content of `v`, since it uses `v` as an accumulator during
// insertion.
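// (Hypothetical usage sketch; the names below are illustrative only.)
//   mt_insert hsz mt leaf_hash;
//   // `leaf_hash` now holds an intermediate compressed hash rather than the
//   // inserted leaf, so copy the leaf beforehand if its value is still needed.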
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always invalidated (stale) right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
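// (Hypothetical construction sketch, assuming a suitable `hsz`, `hash_spec`,
// `hash_fun`, an eternal region `r`, and an initial hash `init`.)
//   let mt = mt_create_custom hsz hash_spec r init hash_fun in
//   ... // insert further leaves with `mt_insert`, query with `mt_get_root`
//   mt_free mt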
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` comes from the target Merkle tree and
// each element has a different location in `MT?.hs` (and thus a different
// region id), we cannot use the regionality property for `path`s. Hence we
// manually define the invariants and the representation here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
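// (Illustrative trace, not part of the original development.) For a tree with
// three leaves, i.e. hs[0] = [h0; h1; h2], hs[1] = [h01], i = 0, j = 3 and
// actd = false at level 0:
// lv 0: j is odd and actd is false, so acc := hs[0][2] (= h2); recurse with j := 1, actd := true
// lv 1: j is odd and actd is true, so rhs[1] := acc (= h2) and acc := hash h01 h2; recurse with j := 0
// lv 2: j = 0, so the recursion stops; `acc` now holds the Merkle root (informally, hash h01 h2).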
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it has already been calculated
// with up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build the rightmost hashes and to calculate the Merkle
// root as well.
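// For the three-leaf example sketched above, the computed root is (informally)
// hash (hash h0 h1) h2; `rhs_ok` is then set to true so that later calls can
// simply copy the cached `mroot`.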
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating whether a rightmost hash is active), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
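// (Worked example, illustrative only.) For a tree with j = 3 leaves and target
// index k = 0, starting with actd = false:
// lv 0: k is even, j <> k and j <> k + 1, so this step contributes 1; recurse with k := 0, j := 1, actd := true
// lv 1: k is even and j = k + 1, but actd is set, so this step also contributes 1; recurse with j := 0
// lv 2: j = 0 contributes 0, giving a total path length of 2 — matching the path
// [h1; h2] (the sibling leaf plus the rightmost hash h2 one level up).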
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p))) | {
"checked_file": "/",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false |
mt: MerkleTree.Low.const_mt_p ->
idx: MerkleTree.Low.offset_t ->
p: MerkleTree.Low.const_path_p ->
root: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | FStar.HyperStack.ST.ST | [] | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.const_path_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_get_path_pre_nst",
"Prims.bool",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | false | true | false | false | false | let mt_get_path_pre #_ mt idx p root =
| let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root | false |
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.vprop_fv | val vprop_fv : FStar.Stubs.Reflection.Types.fv | let vprop_fv = R.pack_fv vprop_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 34,
"end_line": 39,
"start_col": 0,
"start_line": 39
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"] | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | FStar.Stubs.Reflection.Types.fv | Prims.Tot | [
"total"
] | [] | [
"FStar.Stubs.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.vprop_lid"
] | [] | false | false | false | true | false | let vprop_fv =
| R.pack_fv vprop_lid | false |
|
Pulse.Reflection.Util.fst | Pulse.Reflection.Util.vprop_lid | val vprop_lid : Prims.list Prims.string | let vprop_lid = mk_pulse_lib_core_lid "vprop" | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "f984200f79bdc452374ae994a5ca837496476c41",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | {
"end_col": 45,
"end_line": 38,
"start_col": 0,
"start_line": 38
} | (*
Copyright 2023 Microsoft Research
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let u_two = RT.(u_succ (u_succ u_zero))
let u_max_two u = (RT.u_max u_two u)
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"] | {
"checked_file": "/",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | false | Prims.list Prims.string | Prims.Tot | [
"total"
] | [] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | false | false | false | true | false | let vprop_lid =
| mk_pulse_lib_core_lid "vprop" | false |