effect
stringclasses 48
values | original_source_type
stringlengths 0
23k
| opens_and_abbrevs
listlengths 2
92
| isa_cross_project_example
bool 1
class | source_definition
stringlengths 9
57.9k
| partial_definition
stringlengths 7
23.3k
| is_div
bool 2
classes | is_type
null | is_proof
bool 2
classes | completed_definiton
stringlengths 1
250k
| dependencies
dict | effect_flags
sequencelengths 0
2
| ideal_premises
sequencelengths 0
236
| mutual_with
sequencelengths 0
11
| file_context
stringlengths 0
407k
| interleaved
bool 1
class | is_simply_typed
bool 2
classes | file_name
stringlengths 5
48
| vconfig
dict | is_simple_lemma
null | source_type
stringlengths 10
23k
| proof_features
sequencelengths 0
1
| name
stringlengths 8
95
| source
dict | verbose_type
stringlengths 1
7.42k
| source_range
dict |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
FStar.HyperStack.ST.ST | val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd) | val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p = | true | null | false | let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// `insert_` inserts the accumulator hash `acc` at level `lv` of the
// level-indexed hash vectors `hs`, then (when level `lv` becomes odd-sized)
// combines the two rightmost hashes with `hash_fun` and recurses one level
// up with halved indices. `i`/`j` delimit the valid window of level `lv`.
// Correctness is stated against the high-level spec `MTH.insert_`.
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety: only the element regions of `hs` from level `lv` up,
// the vector cells themselves, and the regions of `acc` are modified.
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
// 1) Append a copy of `acc` to level `lv`.
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
// 2) Odd case: level `lv` now has an even number of valid hashes, so
// combine the two rightmost into `acc` and carry it up a level.
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion: carry `acc` into level `lv + 1` with halved indices.
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
// Even case: no carry; the copy in step 1 is the whole insertion
// (matches `MTH.insert_base` on the spec side).
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Pure (non-ST) precondition check for insertion: the tree is not full and
// adding one more element does not overflow the 64-bit offset + index pair.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and runs the pure precondition check on its current value.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// `mt_insert` inserts hash `v` into the tree via `insert_` at level 0, then
// rewrites the tree record with `j + 1` and `rhs_ok = false` (the cached
// rightmost hashes are stale after any insertion). Note `v` is consumed as
// the accumulator, so its contents are clobbered.
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
// Perform the actual insertion into the hash vectors.
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Show that `rhs` and `mroot` are untouched by the insertion.
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
// Rebuild the tree record with the incremented element count.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// The record update only touches the tree pointer itself; everything
// reachable from the fields is preserved.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// `mt_create_custom` allocates an empty tree in a fresh region under `r`
// and inserts the initial hash `init` (a valid tree holds >= 1 element).
// `init` is clobbered during insertion (it is used as the accumulator).
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: a vector of sibling hashes, tagged with its hash size so
// the dependent `hash #hash_size` type is well-formed.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
// Mutable pointer to a path, and its const-pointer (read-only) variant.
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector currently stored in path pointer `p`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory-safety invariant for a path: the pointer and its vector are live
// and freeable, every element hash is valid and lives inside the tree
// region `mtr`, the vector's region extends the path's frame, and `mtr`
// is disjoint from the path's own frame. (Elements alias the tree's
// hashes, so regionality cannot be used; this is stated manually.)
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Abstract footprint of a path: all regions under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lift the sub-sequence [i, j) of low-level hashes `hs` to the high-level
// path representation, reading each buffer's contents in memory `h`.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Representation of a whole path: lift every element of its hash vector.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Indexing lemma: the k-th lifted hash is the representation of the k-th
// low-level hash. Registered as an SMT pattern so indexing proofs fire
// automatically.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Corollary of `lift_path_index_` stated for a whole path pointer.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Extensionality: two hash sequences that agree pointwise on [i, j) lift
// to equal high-level paths. Proven by spelling out both indexings so the
// `lift_path_index_` SMT pattern can connect them.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Framing lemma (element level): modifying a location disjoint from the
// tree region `mtr` preserves validity and region-containment of every
// path element in [i, j). Induction on j.
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Framing lemma (path level): `path_safe` is preserved by modifications
// disjoint from both the path's footprint and the tree region.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Framing for an empty path: with no elements, only the pointer and
// vector liveness matter, so disjointness from `mtr` is not required.
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Framing lemma for the lifted representation: under the same disjointness
// assumptions, the lift of [i, j) is unchanged between h0 and h1.
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level corollary of `path_preserved_`: the hash size and the whole
// lifted path are unchanged by a disjoint modification.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocate a fresh, empty path in region `r` (the element vector lives in
// a new sub-region so that it extends the path pointer's frame).
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to length zero. Only the size is reset (`V.clear`); the
// underlying storage is kept for reuse.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocate a path: first its hash vector, then the pointer itself.
// Element hashes are NOT freed here — they belong to the tree's regions.
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
// `construct_rhs` walks levels from `lv` upward, filling `rhs` with the
// "rightmost" hashes of an incomplete tree and folding the Merkle root
// into the accumulator `acc`. `actd` records whether `acc` already holds
// an active partial hash. Correctness is against `MTH.construct_rhs`.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety: only `rhs` and `acc` are written; `hs` is read-only.
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: no elements at this level; nothing to do.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even `j`: this level is complete; recurse directly one level up.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd `j`: level has a dangling rightmost hash.
else begin
if actd
then begin
// `acc` is active: save it into `rhs.[lv]`, then fold the
// level's last hash into `acc`.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// `acc` not active yet: initialize it with the level's last hash.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
// Recurse one level up; `acc` is now active in either branch.
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
// Pure precondition for `mt_get_root`; currently always satisfied.
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper over `mt_get_root_pre_nst` on a const tree pointer.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// `mt_get_root` writes the Merkle root into `rt`. If the cached rightmost
// hashes are up to date (`rhs_ok`), the stored `mroot` is copied out;
// otherwise `construct_rhs` recomputes them and the root, the new root is
// cached in `mroot`, and `rhs_ok` is set.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
// Fast path: cache is valid — just copy the stored root out.
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
// Slow path: rebuild the rightmost hashes and the root into `rt`.
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly computed root in `mroot`.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Mark the cache valid (`rhs_ok = true`); all other fields unchanged.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// Append a hash `hp` (which lives inside the tree region `mtr`) to path
// `p`. The path stores a pointer, not a copy, so `hp` must remain inside
// `mtr` for `path_safe` to hold.
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// `V.insert` may reallocate the vector's storage within its frame.
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
// Store the (possibly reallocated) vector back into the path record.
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (indicating the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
// One step of the path-length computation for a single tree level: the level
// contributes one sibling hash (1ul) unless `k` has no usable sibling there
// (0ul). The refinement on the result ties this implementation to the
// high-level `MTH.mt_path_length_step`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
  // Empty level: no hash to include.
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       // Left child: no sibling when `k` is the last node, or when the only
       // candidate (j = k + 1) has no accumulated rightmost hash (`actd`).
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       // Right child: its left sibling always exists.
       else 1ul)
// Total Merkle path length for target index `k` in a tree holding `j`
// elements, starting from level `lv`; `actd` records whether a rightmost
// hash has already been accumulated at a lower level. The refinement ties
// the result to the high-level `MTH.mt_path_length` and bounds it by the
// remaining tree height (32ul - lv).
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
   (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       // Add this level's contribution, then recurse on the parent level;
       // an odd `j` leaves a dangling rightmost node, which activates the
       // flag for the levels above. Terminates since `j` strictly halves.
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True)) | [] | MerkleTree.Low.mt_get_path_length | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.const_path_p
-> FStar.HyperStack.ST.ST LowStar.Vector.uint32_t | {
"end_col": 29,
"end_line": 1786,
"start_col": 30,
"start_line": 1784
} |
FStar.HyperStack.ST.ST | val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True )) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i | val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i = | true | null | false | let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.uint32_t",
"LowStar.Vector.index",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast",
"FStar.Ghost.reveal"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
// Loc-algebra helper for `insert_`: flattens the union of the level-`lv`
// footprint (its element + its vector slot, each unioned with the accumulator
// footprint `aloc`) and the footprint of the recursive call on levels
// `lv+1 .. size_of hs` into the single union shape used in `insert_`'s
// `modifies` postcondition.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the `[lv, size)` slot footprint at `lv+1`.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  // Split the `[lv, size)` element footprint the same way.
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  // Collapse the duplicated `aloc` (loc_union is idempotent under assoc).
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  // Regroup element/slot pairs back into the flat postcondition shape.
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening: a `modifies l1` footprint can always be enlarged to
// `modifies ((l1 `union` l2) `union` l3)`. Used in the even-index branch of
// `insert_`, where only level `lv` was touched but the postcondition speaks
// about the full recursive footprint.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  // (l1 ∪ l2) includes l1, and ((l1 ∪ l2) ∪ l3) includes (l1 ∪ l2);
  // `modifies` is monotone in the footprint.
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// The element at index `length s - 1` of `snoc s v` is the last element of
// the original `s` (not `v`, which sits at index `length s`). Discharged
// automatically by the SMT solver.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The regional-vector invariant already implies that the elements in any
// sub-range `[i, j)` live in their expected regions; this lemma just exposes
// that fact (proof is automatic).
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Low-level counterpart of `MTH.insert_`: pushes `acc` onto level `lv`, then
// (when the new level-`lv` index is odd) compresses the last two hashes with
// `hash_fun` and recurses on level `lv+1`. The bulk of the body is memory-
// safety and functional-correctness proof obligations; the statement order of
// the lemma calls and `assert`s below is significant for SMT verification.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
        (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
          (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // 1) Append a copy of `acc` to level `lv`.
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (// Odd index: compress the last two hashes at level `lv` into `acc`
        // and carry it up one level.
        insert_index_helper_odd lv (Ghost.reveal i) j;
        assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
        let lvhs = V.index hs lv in
        assert (U32.v (V.size_of lvhs) ==
               S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
        assert (V.size_of lvhs > 1ul);

        /// 3) Update the accumulator `acc`.
        hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
        assert (Rgl?.r_inv (hreg hsz) hh1 acc);
        hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
        let hh2 = HST.get () in

        // 3-1) For the `modifies` postcondition
        assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
        assert (modifies
                 (loc_union
                   (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul)))
                   (B.loc_all_regions_from false (B.frameOf acc)))
                 hh0 hh2);

        // 3-2) Preservation
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_loc_elems_preserved
          hs (lv + 1ul) (V.size_of hs)
          (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        assert (RV.rv_inv hh2 hs);
        assert (Rgl?.r_inv (hreg hsz) hh2 acc);

        // 3-3) For `mt_safe_elts`
        V.get_preserved hs lv
          (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
        mt_safe_elts_preserved
          (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
          (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

        // 3-4) Correctness
        insert_snoc_last_helper
          (RV.as_seq hh0 (V.get hh0 hs lv))
          (Rgl?.r_repr (hreg hsz) hh0 acc);
        assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

        /// 4) Recursion
        insert_ (lv + 1ul)
          (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
          hs acc hash_fun;
        let hh3 = HST.get () in

        // 4-0) Memory safety brought from the postcondition of the recursion
        assert (RV.rv_inv hh3 hs);
        assert (Rgl?.r_inv (hreg hsz) hh3 acc);
        assert (modifies (loc_union
                           (loc_union
                             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                           (B.loc_all_regions_from false (B.frameOf acc)))
                         hh2 hh3);
        assert (modifies
                 (loc_union
                   (loc_union
                     (loc_union
                       (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                       (V.loc_vector_within hs lv (lv + 1ul)))
                     (B.loc_all_regions_from false (B.frameOf acc)))
                   (loc_union
                     (loc_union
                       (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                       (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                     (B.loc_all_regions_from false (B.frameOf acc))))
                 hh0 hh3);

        // 4-1) For `mt_safe_elts`
        rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
        RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
        assert (loc_disjoint
                 (V.loc_vector_within hs lv (lv + 1ul))
                 (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
        assert (loc_disjoint
                 (V.loc_vector_within hs lv (lv + 1ul))
                 (B.loc_all_regions_from false (B.frameOf acc)));
        V.get_preserved hs lv
          (loc_union
            (loc_union
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
              (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
            (B.loc_all_regions_from false (B.frameOf acc)))
          hh2 hh3;
        assert (V.size_of (V.get hh3 hs lv) ==
               j + 1ul - offset_of (Ghost.reveal i)); // head preserved
        assert (mt_safe_elts hh3 (lv + 1ul) hs
                 (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
        mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
        assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

        // 4-2) Correctness
        mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
        assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
        mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
        MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
          (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
        assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (// Even index: no compression needed; only level `lv` was touched.
        insert_index_helper_even lv j;
        // memory safety
        assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
        mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
        assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
        assert (modifies
                 (loc_union
                   (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                   (V.loc_vector_within hs lv (lv + 1ul)))
                 hh0 hh1);
        insert_modifies_union_loc_weakening
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (B.loc_all_regions_from false (B.frameOf acc))
          (loc_union
            (loc_union
              (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            (B.loc_all_regions_from false (B.frameOf acc)))
          hh0 hh1;
        // correctness
        mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
        MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
          (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
        assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and the global
// index (`offset + j + 1`) still fits in 64 bits.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // Trivial equality to help the checker relate `v`'s implicit hash size
  // to the dereferenced tree's hash size.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let dmt = B.get h0 mt 0 in
      mt_safe h0 mt /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (B.frameOf mt) (B.frameOf v) /\
      MT?.hash_size dmt = Ghost.reveal hsz /\
      mt_insert_pre_nst dmt v))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf v)))
               h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
      mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Delegate the actual work to `insert_` starting at level 0; `v` is
  // consumed as the accumulator.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` are untouched by `insert_`: show their invariants and
  // representations survive its footprint.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached rightmost hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree struct only touches `mt` itself; re-establish all the
  // component invariants across that write.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
  (requires (fun h0 ->
    Rgl?.r_inv (hreg hsz) h0 init /\
    HH.disjoint r (B.frameOf init)))
  (ensures (fun h0 mt h1 ->
    // memory safety
    modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = hsz /\
    mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  // Allocate an empty tree in region `r`, then insert the mandatory first
  // element (`init` is consumed as the insertion accumulator).
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: a hash size together with a vector of hash pointers. The
// hashes are *borrowed* from the target tree's `MT?.hs`, so `path` has no
// regional structure of its own (see the comment above).
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p` in
// heap `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Requires: `p` and its vector are live and freeable, every stored hash is a
// valid hash living inside the tree region `mtr`, the vector's region extends
// `p`'s frame, and `p`'s frame is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything in (and under) the frame of `p`. Note this
// does NOT include the pointed-to hashes, which live in the tree's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence `[i, j)` of low-level hash pointers to the
// high-level path representation, element by element (recursing on `j`).
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts the whole path to its high-level (spec) representation.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the `k`-th lifted element is
// the representation of the `k`-th low-level hash. Registered as an SMT
// pattern so downstream proofs get it automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  // Peel elements off the right end until `k` is the last one.
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: indexing the lifted path agrees
// with reading the low-level vector.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// If two hash sequences agree on `[i, j)`, their lifted paths over that range
// are equal. Proved by spelling out pointwise equalities (via the
// `lift_path_index_` SMT pattern) under both `k - i` and `k` indexings.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// If a modification is disjoint from the whole tree region `mtr`, every hash
// in `hs[i, j)` (which lives inside `mtr`) keeps its invariant and region.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (// The last element's region is inside `mtr`, hence disjoint from `dl`;
        // `r_sep` then preserves its invariant. Recurse on the prefix.
        assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
        Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
        path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` is stable under modifications disjoint from both the path's
// own footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: with no stored hashes, disjointness from
// the tree region is not needed — only disjointness from the path footprint.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Modifications disjoint from the tree region also preserve the *lifted*
// contents of `hs[i, j)`, not just their safety (cf. `path_safe_preserved_`).
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (// Prefix by recursion; last element by `r_sep` (its region is inside
        // `mtr` and thus untouched by `dl`).
        path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
        path_preserved_ mtr hs i (j - 1) dl h0 h1;
        assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
        Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level corollary of `path_preserved_`: the lifted path (and the stored
// hash size) is unchanged by modifications disjoint from the path footprint
// and the tree region.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in region `r` (disjoint from the tree region
// `mtr`); the hash vector itself lives in a new sub-region of `r`.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to empty. Only the vector's size is cleared; the borrowed
// hash pointers are not freed (they belong to the tree).
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees a path: the hash vector, then the path pointer itself. The pointed-to
// hashes are NOT freed here — they are owned by the tree.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Low-level counterpart of `MTH.construct_rhs`: walks up from level `lv`,
// recording the "rightmost" hash of each incomplete level into `rhs` and
// folding it into `acc`, which ends up holding the Merkle root. `actd`
// tracks whether `acc` already holds a meaningful partial hash. As with
// `insert_`, the lemma-call and assertion order below is proof-critical.
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
      HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
      mt_safe_elts #hsz h0 lv hs i j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.loc_rvector rhs)
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 rhs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs i j;
      MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
        (U32.v lv)
        (Rgl?.r_repr (hvvreg hsz) h0 hs)
        (Rgl?.r_repr (hvreg hsz) h0 rhs)
        (U32.v i) (U32.v j)
        (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
      (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
      )))
    (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in

  // Base case: the level is empty; nothing to record.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    (if j % 2ul = 0ul
    then begin
      // Even `j`: this level is complete; recurse directly on level `lv+1`.
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    else begin
      // Odd `j`: the last hash at this level is "rightmost".
      if actd
      then begin
        // `acc` already holds a partial hash: save it into `rhs.[lv]`, then
        // fold the level's last hash into `acc`.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                       (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                              (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // `acc` is empty so far: just copy the level's last hash into it.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      // Summarize the state after either branch, then recurse with
      // `actd = true` (acc now definitely holds a partial hash).
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                     (if actd
                     then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                (Rgl?.r_repr (hreg hsz) hh0 acc)
                     else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));

      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_root`; currently unconditional
// (kept as a function so the API shape matches the other `_pre_nst` checks).
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper over `mt_get_root_pre_nst`: dereferences the (const) tree
// pointer and runs the (trivial) root-precondition check.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  // Trivial equality to relate `rt`'s implicit hash size to the tree's.
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      mt_get_root_pre_nst dmt rt /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf rt)))
               h0 h1 /\
      mt_safe h1 mt /\
      (let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
      MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
      MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
      MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
      MT?.offset mtv1 == MT?.offset mtv0 /\
      MT?.rhs_ok mtv1 = true /\
      Rgl?.r_inv (hreg hsz) h1 rt /\
      // correctness
      MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
      (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: cached root is up to date; just copy it into `rt`.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild the rightmost hashes; `rt` accumulates the root.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root in `mroot`.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark the cached `rhs`/`mroot` as valid.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
inline_for_extraction
// Appends the hash `hp` to the path `p` (which must not be full). The path
// stores pointers into the tree region `mtr`, so the spec requires `hp` to
// live under `mtr` and `p`'s own region to be disjoint from it. The high-level
// correctness clause says the lifted path after the call equals
// `MTH.path_insert` applied to the lifted path before the call.
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
        let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
        V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
        hsz = hsz0 /\ hsz = hsz1 /\
        (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
        S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // Vector insert may reallocate; `ipv` is the (possibly new) hash vector.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  // The insert only modifies `ipv`'s frame, which is disjoint from `mtr`,
  // so every hash pointer already in the path is preserved.
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Store the grown vector back into the path struct.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  // Writing the path cell touches only `p`'s region, not the hashes.
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  // correctness: the lifted path is the old lifted path with `hp` snoc'ed.
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate the required Merkle path length.
//
// `mt_path_length` appears in the postcondition of `mt_get_path` and in the
// precondition of `mt_verify`. For a detailed description, see `mt_get_path`
// and `mt_verify`.
private
// Number of hashes (0 or 1) that level `j`'s step contributes to a Merkle
// path for leaf index `k`; matches the high-level `MTH.mt_path_length_step`
// by the refinement on the result.
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
  // Empty level contributes nothing. For even `k`, the right sibling `k+1`
  // is included unless `k` is at the boundary (`j = k`, or `j = k + 1` with
  // no accumulated right-hand hash). For odd `k`, the left sibling always
  // exists and contributes one hash.
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
private inline_for_extraction
// Total Merkle path length for leaf index `k` in a tree with `j` elements,
// starting at level `lv`; sums `mt_path_length_step` over the levels. The
// refinement ties the result to the high-level `MTH.mt_path_length` and
// bounds it by the remaining tree height (`32 - lv`).
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  // An odd `j` leaves a dangling rightmost node at this level, so a
  // right-hand hash is "active" (`nactd`) for all levels above.
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
// Reads the path struct through the const pointer and reports how many
// hashes it currently holds.
let mt_get_path_length mtr p =
  let pv = !*(CB.cast p) in
  V.size_of (Path?.hashes pv)
private inline_for_extraction
// Performs one level's step of Merkle path construction: depending on the
// parity of `k` and its position relative to `j`, appends either the sibling
// hash from `hs[lv]`, the rightmost hash `rhs[lv]`, or nothing to the path
// `p`. The postcondition ties the number of appended hashes to
// `mt_path_length_step` and the content to `MTH.mt_make_path_step`.
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
      V.size_of (phashes h1 p) <= lv + 2ul /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_make_path_step
           (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  // `hs[lv]` is indexed relative to the flushed offset of `i`.
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // Odd `k`: the left sibling `k - 1` always exists in `hs[lv]`.
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // Even `k`: use the right sibling `k + 1` when present. At the right
    // boundary (`k + 1 = j`) the sibling is the cached rightmost hash
    // `rhs[lv]`, appended only when one is active (`actd`).
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
private inline_for_extraction
// Non-stateful ("nst") precondition check for `mt_get_path_step`: the
// requested index `i` must be within the path's current number of hashes.
// Operates on an already-dereferenced `path` value.
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper around `mt_get_path_step_pre_nst`: dereferences the
// const path pointer and runs the bounds check on the current path value.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv)))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True )) | [] | MerkleTree.Low.mt_get_path_step | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.const_path_p -> i: LowStar.Vector.uint32_t
-> FStar.HyperStack.ST.ST MerkleTree.Low.Datastructures.hash | {
"end_col": 59,
"end_line": 1894,
"start_col": 35,
"start_line": 1892
} |
FStar.HyperStack.ST.ST | val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx | val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx = | true | null | false | let mt = CB.cast mt in
let h0 = HST.get () in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.mt_flush_to_pre_nst",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  // `i` is ghost: only the level-head offset it induces matters at runtime.
  i:Ghost.erased index_t ->
  // `j < pow2 (32 - lv) - 1` guarantees `j + 1` does not overflow at this level.
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
// The proof proceeds in two phases: (1) build the extended level vector
// `ihv` detached from `hs`, then (2) assign it back into `hs[lv]`, threading
// the memory-safety invariants (`rv_inv`, `mt_safe_elts`) through each state.
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));

  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper: when `j` is even, inserting one element does not change
// the parent index, i.e. `j / 2 = (j + 1) / 2`. Discharged automatically.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper: when `j` is odd, inserting one element bumps the parent
// index (`(j + 1) / 2 = j / 2 + 1`), the parent index still fits one level up,
// and the current level is nonempty (`j - offset_of i > 0`). SMT-discharged.
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a 4-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Used to regroup the modifies clauses produced by the recursive insertion.
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  // Chain of associativity/commutativity steps over `loc_union`.
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Shows that the union of (a) the footprint touched at level `lv` and (b) the
// footprint touched by the recursive call on levels above `lv` (each joined
// with an accumulator footprint `aloc`) equals the flat footprint of all
// levels from `lv` up, joined with `aloc`. Lets `insert_`'s modifies clause
// collapse to its stated postcondition.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the vector footprint at `lv + 1`.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  // Split the element footprint at `lv + 1`.
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: if `modifies l1 h0 h1` then modifies holds of any union
// containing `l1`. Used in the even-`j` base case of `insert_`, where only
// the level-`lv` footprint is actually touched.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence helper: the element at index `length s - 1` of `snoc s v` is the
// last element of the original `s`. Discharged automatically.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// `rv_inv` already implies the per-element region conditions on any
// sub-range [i, j); this lemma just makes that implication explicit.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION        AFTER INSERTION
// lv
// 0   h0  h1  h2       ====>  h0  h1  h2  h3
// 1   h01                     h01 h23
// 2                           h03
//
// Recursion stops at the first even `j` (base case); when `j` is odd the two
// rightmost hashes at level `lv` are compressed into `acc` and the recursion
// continues one level up. Correctness is stated against the high-level
// specification `MTH.insert_`.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  // `acc` doubles as input hash and in-place accumulator during compression.
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (// Odd case: compress the two rightmost hashes into `acc` and recurse.
       insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (// Even case: the copy at level `lv` was the whole insertion.
       insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Pure precondition check for insertion: the tree is not full and adding one
// more element to the global offset does not overflow 64 bits.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and runs the pure check.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
// Implementation: run `insert_` from level 0, then bump `j` and clear the
// `rhs_ok` flag, re-establishing the tree invariants at each step.
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` live outside `insert_`'s footprint; restore their
  // invariants across the modification.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the record only touches the tree pointer itself; all regional
  // structures are preserved.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
// Implementation: allocate an empty tree in region `r`, then insert `init`
// as its first (mandatory) element.
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size paired with a vector of hash pointers (the hashes
// are owned by the tree, not the path).
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
// Read-only variant of `path_p` used by query-side APIs.
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable, every stored hash is valid and lives inside the
// tree region `mtr`, and the path's own region is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything allocated under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts a sub-range [i, j) of a sequence of low-level hashes to the
// high-level path representation, element by element (recursing on `j`).
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole stored hash vector.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th lifted element is the
// representation of the k-th low-level hash. Registered as an SMT pattern so
// it fires automatically when indexing a lifted path.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: indexing a lifted path agrees
// with reading the i-th stored hash pointer.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                  Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence: two hash sequences equal on [i, j) lift to equal paths. The
// proof exposes the pointwise facts to the SMT solver in both index frames
// (absolute `k` and relative `k - i`).
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Element-wise preservation: if a modification is disjoint from the tree
// region `mtr`, every hash in the range [i, j) stays valid and inside `mtr`.
// Proof recurses from `j` down, separating one element at a time.
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` survives any modification disjoint from both the path's
// footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: with no stored hashes there is no tree
// dependency, so only disjointness from the path's own footprint is needed.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Lifting is stable: a modification disjoint from the tree region leaves
// the lifted representation of hashes in [i, j) unchanged. (The ensures
// clause first re-establishes safety so `lift_path_` is well-defined at h1.)
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level corollary: a disjoint modification preserves both the stored
// hash size and the lifted path representation.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in region `r` (disjoint from the tree
// region `mtr`); its hash vector lives in a new sub-region of `r`.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to length zero. Only the vector's logical size is cleared;
// the hashes themselves are owned by the tree and are not freed here.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocates a path: frees its hash-pointer vector and then the path
// pointer itself. The tree's hashes referenced by the path are untouched.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path

// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
// `actd` tracks whether `acc` currently holds an active intermediate hash;
// correctness is stated against the high-level `MTH.construct_rhs`.
private
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
     HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
     mt_safe_elts #hsz h0 lv hs i j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.loc_rvector rhs)
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 rhs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs i j;
     MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
       (U32.v lv)
       (Rgl?.r_repr (hvvreg hsz) h0 hs)
       (Rgl?.r_repr (hvreg hsz) h0 rhs)
       (U32.v i) (U32.v j)
       (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
     (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
     )))
   (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
// Runtime precondition check for `mt_get_root`. It is trivially `true`, but is
// kept as a function so the extracted C API mirrors the other `_pre_nst`
// checks (all verification-relevant preconditions live in the `val` specs).
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
   (requires (fun h0 ->
     let mt = CB.cast mt in
     MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun _ _ _ -> True))
// Stateful wrapper: dereference the const tree pointer and run the (trivial)
// runtime check on the dereferenced value. The shadowing `let hsz = ...` and
// the `assert` connect the erased hash size to the concrete one for the SMT
// solver.
let mt_get_root_pre #hsz mt rt =
  let mtv = !*(CB.cast mt) in
  let hsz = MT?.hash_size mtv in
  assert (MT?.hash_size mtv = hsz);
  mt_get_root_pre_nst mtv rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
//
// On exit the tree is marked `rhs_ok = true` and `rt` holds the root; the
// result is related to the high-level spec via `MTH.mt_get_root`.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
      MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
      MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
      MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
      MT?.offset mtv1 == MT?.offset mtv0 /\
      MT?.rhs_ok mtv1 = true /\
      Rgl?.r_inv (hreg hsz) h1 rt /\
      // correctness
      MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
      (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: the cached root `mroot` is up to date — just copy it into `rt`.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: (re)build the rightmost hashes and compute the root into `rt`.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root into the tree's `mroot` field.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark the tree's rightmost hashes as valid (`rhs_ok = true`).
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
// Append one hash (by pointer, not by copy) to a Merkle path. The hash `hp`
// must live inside the tree region `mtr`, disjoint from the path's own region.
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
        let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
        V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
        hsz = hsz0 /\ hsz = hsz1 /\
        (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
        S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // `V.insert` may reallocate the underlying vector; the path still only
  // stores a pointer to `hp`, it does not copy the hash contents.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Store the (possibly new) vector back into the path struct.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// One level's contribution (0 or 1) to the total path length; the refinement
// ties the result to the high-level spec `MTH.mt_path_length_step`.
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
// Total Merkle path length: sum the per-level steps while halving `k`/`j`
// up the tree. `nactd` records whether an "active" accumulated hash exists at
// the next level (it does once some level has an odd number of elements).
// Termination: `j` strictly decreases (`decreases (U32.v j)`).
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
// Return the current number of hashes stored in a (const) path.
let mt_get_path_length mtr p =
  let pth = !*(CB.cast p) in
  V.size_of (Path?.hashes pth)
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
     V.size_of (phashes h1 p) <= lv + 2ul /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
             (MTH.mt_make_path_step
               (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
               (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
// Insert (at most one) sibling hash for level `lv` into the path:
// - odd `k`: the left sibling `hs[lv][k-1-ofs]`;
// - even `k`: the right sibling `hs[lv][k+1-ofs]` if it exists, else the
//   stored rightmost hash `rhs[lv]` when an accumulator is active, else nothing.
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
// Bounds check for `mt_get_path_step`: `i` must index into the path's hashes.
let mt_get_path_step_pre_nst #hsz mtr p i =
  let hashes = Path?.hashes p in
  i < V.size_of hashes
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
// Stateful wrapper: dereference the const path and run the bounds check.
let mt_get_path_step_pre #hsz mtr p i =
  let np = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*np i
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
// Return the `i`-th hash pointer stored in the path (no copy is made).
let mt_get_path_step #hsz mtr p i =
  let pv = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pv)) (Path?.hashes pv) i
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) ==
     V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
             (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
               (U32.v i) (U32.v j) (U32.v k) before actd)))))
   (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
// Level-by-level path construction: emit this level's sibling via
// `mt_make_path_step`, then recurse one level up with halved indices.
// `actd` becomes `true` as soon as any level has an odd element count.
// Termination: `lv` strictly increases toward `merkle_tree_size_lg`.
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                    (MTH.mt_make_path_step
                      (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd));
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv + 1)
                      (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                      (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                      (lift_path hh1 mtr p)
                      (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv)
                      (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
// Runtime precondition for `mt_get_path`: the 64-bit `idx` must connect to
// the tree's offset, hash sizes must agree, the split 32-bit index must lie
// in the window [i, j), and the output path must start empty.
// NOTE: the conjunct order matters — `split_offset` is only well-defined once
// `offsets_connect` holds, so the `&&` short-circuit is load-bearing.
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
   MT?.i mtv <= idx && idx < MT?.j mtv &&
   V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
// Stateful wrapper: dereference the const tree and path pointers, then run
// the pure runtime check `mt_get_path_pre_nst` on the dereferenced values.
let mt_get_path_pre #_ mt idx p root =
  let mtv = !*(CB.cast mt) in
  let pv = !*(CB.cast p) in
  mt_get_path_pre_nst mtv idx pv root
// Absorption helper: unioning `l2` into `loc_union l1 l2` is a no-op.
// Discharged automatically by the SMT solver (proof is `()`).
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
//
// Returns `MT?.j` (the element count used), and leaves `root` holding the
// Merkle root; the result matches the high-level spec `MTH.mt_get_path`.
#push-options "--z3rlimit 60"
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = Ghost.reveal hsz /\
     Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
     mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
     mt_safe h0 mt /\
     path_safe h0 (B.frameOf mt) p /\
     Rgl?.r_inv (hreg hsz) h0 root /\
     HH.disjoint (B.frameOf root) (B.frameOf mt) /\
     HH.disjoint (B.frameOf root) (B.frameOf p)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     let idx = split_offset (MT?.offset mtv0) idx in
     MT?.hash_size mtv0 = Ghost.reveal hsz /\
     MT?.hash_size mtv1 = Ghost.reveal hsz /\
     Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
     Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
     // memory safety
     modifies (loc_union
                (loc_union
                  (mt_loc mt)
                  (B.loc_all_regions_from false (B.frameOf root)))
                (path_loc p))
              h0 h1 /\
     mt_safe h1 mt /\
     path_safe h1 (B.frameOf mt) p /\
     Rgl?.r_inv (hreg hsz) h1 root /\
     V.size_of (phashes h1 p) ==
     1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
     // correctness
     (let sj, sp, srt =
       MTH.mt_get_path
         (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
     sj == U32.v (MT?.j mtv1) /\
     S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
     srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // First compute (or fetch) the root; this also makes `rhs` up to date.
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  // The path starts with the target leaf itself (by pointer).
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                  (MTH.path_insert
                    (lift_path hh1 mtframe p)
                    (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Then collect the sibling hashes level by level.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                     (loc_union
                       (mt_loc ncmt)
                       (B.loc_all_regions_from false (B.frameOf root)))
                     (path_loc p))
                   hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                  (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                    (U32.v i) (U32.v j) (U32.v idx)
                    (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing

// Loc-algebra helper for `mt_flush_to_`'s `modifies` postcondition: the union
// of the level-`lv` element/slot locations with the recursive (lv+1 ..) ones
// equals the flat union over [lv, size_of hs). Proved by peeling one element
// off each side and reassociating with `loc_union_assoc_4`.
private val
mt_flush_to_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (loc_union
            (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
        loc_union
          (RV.rv_loc_elems h hs lv (V.size_of hs))
          (V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Recursive worker for flushing: at each level `lv`, drop the hashes between
// `offset_of pi` and `offset_of i` from `hs[lv]` (in place), then recurse one
// level up with halved indices. The proof tracks rv-invariants, `modifies`
// footprints, and agreement with the high-level `MTH.mt_flush_to_`.
private
val mt_flush_to_:
  hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  pi:index_t ->
  i:index_t{i >= pi} ->
  j:Ghost.erased index_t{
    Ghost.reveal j >= i &&
    U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                (V.loc_vector_within hs lv (V.size_of hs)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
     S.equal (RV.as_seq h1 hs)
             (MTH.mt_flush_to_
               (U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
               (U32.v i) (U32.v (Ghost.reveal j)))))
   (decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
  let hh0 = HST.get () in

  // Base conditions
  mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
  V.loc_vector_within_included hs 0ul lv;
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  let oi = offset_of i in
  let opi = offset_of pi in
  // Recursion stops when the offsets coincide: nothing to drop at this level,
  // hence nothing at any higher level either.
  if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
  else begin

    /// 1) Flush hashes at the level `lv`, where the new vector is
    /// not yet connected to `hs`.
    let ofs = oi - opi in
    let hvec = V.index hs lv in
    let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
    let hh1 = HST.get () in

    // 1-0) Basic disjointness conditions for `RV.assign`
    V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
      (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                                (Rgl?.region_of (hvreg hsz) b2));
    V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
      (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                                (Rgl?.region_of (hvreg hsz) b2));
    V.forall_preserved
      hs 0ul lv
      (fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
                            (Rgl?.region_of (hvreg hsz) b))
      (RV.loc_rvector hvec)
      hh0 hh1;
    V.forall_preserved
      hs (lv + 1ul) (V.size_of hs)
      (fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
                            (Rgl?.region_of (hvreg hsz) b))
      (RV.loc_rvector hvec)
      hh0 hh1;
    assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);

    // 1-1) For the `modifies` postcondition.
    assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

    // 1-2) Preservation
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

    // 1-3) For `mt_safe_elts`
    assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
    mt_safe_elts_preserved
      (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
      (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

    // 1-4) For the `rv_inv` postcondition
    RV.rs_loc_elems_elem_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
    RV.rs_loc_elems_parent_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v lv);
    RV.rv_elems_inv_preserved
      hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
      hh0 hh1;
    assert (RV.rv_elems_inv hh1 hs 0ul lv);
    RV.rs_loc_elems_elem_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v (V.size_of hs))
      (U32.v lv + 1) (U32.v (V.size_of hs))
      (U32.v lv);
    RV.rs_loc_elems_parent_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      (U32.v lv + 1) (U32.v (V.size_of hs));
    RV.rv_elems_inv_preserved
      hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
      hh0 hh1;
    assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
    assert (rv_itself_inv hh1 hs);
    assert (elems_reg hh1 hs);

    // 1-5) Correctness
    assert (S.equal (RV.as_seq hh1 flushed)
                    (S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
                             (S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));

    /// 2) Assign the flushed vector to `hs` at the level `lv`.
    RV.assign hs lv flushed;
    let hh2 = HST.get () in

    // 2-1) For the `modifies` postcondition.
    assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
    assert (modifies (loc_union
                       (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                       (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

    // 2-2) Preservation
    V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

    // 2-3) For `mt_safe_elts`
    assert (V.size_of (V.get hh2 hs lv) ==
           Ghost.reveal j - offset_of i);
    mt_safe_elts_preserved
      (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
      (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

    // 2-4) Correctness
    RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
    RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
    assert (S.equal (RV.as_seq hh2 hs)
                    (S.append
                      (RV.as_seq_sub hh0 hs 0ul lv)
                      (S.cons (RV.as_seq hh1 flushed)
                              (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
    as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);

    // if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
    // contradicting the branch.
    assert (lv + 1ul < merkle_tree_size_lg);
    assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
    assert (RV.rv_inv hh2 hs);
    assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));

    /// 3) Recursion
    mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
      (Ghost.hide (Ghost.reveal j / 2ul));
    let hh3 = HST.get () in

    // 3-0) Memory safety brought from the postcondition of the recursion
    assert (modifies
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
             hh0 hh3);
    mt_flush_to_modifies_rec_helper lv hs hh0;
    V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
    V.loc_vector_within_included hs lv (lv + 1ul);
    RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
    assert (loc_disjoint
             (V.loc_vector_within hs lv (lv + 1ul))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
    V.get_preserved hs lv
      (loc_union
        (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      hh2 hh3;
    assert (V.size_of (V.get hh3 hs lv) ==
           Ghost.reveal j - offset_of i);
    assert (RV.rv_inv hh3 hs);
    mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
    assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));

    // 3-1) Correctness
    mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
                      (U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
    mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
    MTH.mt_flush_to_rec
      (U32.v lv) (RV.as_seq hh0 hs)
      (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
                      (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
  end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
// Runtime precondition for `mt_flush_to`: `idx` must connect to the tree's
// offset, and the split 32-bit index must fall within [i, j).
// NOTE: the `&&` short-circuit matters — `split_offset` is only well-defined
// once `offsets_connect` holds.
let mt_flush_to_pre_nst mtv idx =
  offsets_connect (MT?.offset mtv) idx &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
   idx >= MT?.i mtv &&
   idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_flush_to_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.const_mt_p -> idx: MerkleTree.Low.offset_t -> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 29,
"end_line": 2432,
"start_col": 28,
"start_line": 2428
} |
FStar.HyperStack.ST.ST | val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p | val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p = | true | null | false | let pv = !*p in
V.free (Path?.hashes pv);
B.free p | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.free",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Vector.free",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.__proj__Path__item__hashes",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 -> | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1)) | [] | MerkleTree.Low.free_path | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | p: MerkleTree.Low.path_p -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 10,
"end_line": 1306,
"start_col": 17,
"start_line": 1303
} |
FStar.HyperStack.ST.ST | val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root | val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root = | true | null | false | let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.const_path_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_get_path_pre_nst",
"Prims.bool",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction lemma for `mt_safe_elts`: folds one unrolling of the recursion
// (head size condition + tail property) back into the full predicate.
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination lemma: from the full predicate, extract the head condition
// (exact size of the vector at level `lv`).
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination lemma: from the full predicate, extract the tail property
// (the predicate one level up, with halved indices).
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// If every level from `lv` upward holds an empty vector, then the safety
// predicate holds trivially with i = j = 0 (the state of a freshly
// allocated tree).
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                    (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` survives any heap modification whose footprint
// `p` is disjoint from the vector structure of `hs`. Registered as an SMT
// pattern so the solver applies it automatically across stateful calls.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
// Full tree invariant: the pointer is live and freeable; the two hash vectors
// and the root buffer satisfy their own invariants; element access is valid
// (`mt_safe_elts`); and the three sub-structures live in pairwise-disjoint
// regions, each extending the tree pointer's own region.
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
// The abstract footprint of a tree: everything allocated under the tree
// pointer's region (sound because `mt_safe` forces all sub-structures into
// sub-regions of that frame).
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the full invariant: modifications disjoint from `mt_loc`
// preserve both `mt_safe` and the stored record itself. The body establishes
// the needed `loc_includes` facts and then frames each component invariant.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
// Relates the low-level safety predicate to the high-level specification:
// `mt_safe_elts` on the concrete vectors implies `MTH.hs_wf_elts` on their
// sequence representation.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Lifts a low-level tree value to its high-level (pure, sequence-based)
// counterpart, field by field. Requires the component invariants so the
// sequence representations exist; `mt_safe_elts_spec` discharges the
// well-formedness refinement on the result.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Pointer-level lift: dereference the tree pointer and lift the stored value.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing lemma for the lifted view: a modification disjoint from the tree's
// footprint leaves the high-level representation unchanged (the pointer
// contents and both sequence views are all framed).
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Allocates an empty tree under region `r`: three fresh sub-regions (for the
// hash matrix, the rightmost hashes, and the root), then the record itself.
// The interleaved lemma calls re-establish each component's invariant after
// every allocation; their order follows the heap snapshots h0..h3 and is
// load-bearing for the proof — do not reorder.
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)
// Frees a tree: its two hash vectors (deep free via the regional instances),
// the root hash, and finally the record pointer itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
// Sequence lemma: updating index `i` of an rvector's representation equals
// the concatenation (prefix up to i) ++ [v] ++ (suffix from i+1). Proved by
// slicing the updated sequence on both sides of `i` and relating each slice
// to `as_seq_sub`.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                 (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                 (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
// NOTE(review): verified Low* code — the ghost lemma calls below and their
// exact order are load-bearing for the SMT proof; do not reorder or remove.
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                             (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                             (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                 (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                 (S.append
                   (RV.as_seq_sub hh0 hs 0ul lv)
                   (S.cons (RV.as_seq hh1 ihv)
                           (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper: for an even `j`, inserting one element does not change
// the parent index (j / 2 = (j + 1) / 2). Discharged automatically by SMT.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper: for an odd `j`, inserting one element bumps the parent
// index ((j + 1) / 2 = j / 2 + 1), the parent index still fits at the next
// level, and the current level is non-empty (j - offset_of i > 0).
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a four-way location union: (a u b) u (c u d) == (a u c) u (b u d),
// proved by chaining binary associativity steps. Used to normalize `modifies`
// footprints in the insertion proofs.
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for the recursive insertion: the union of the level-`lv`
// footprint, the recursive (lv+1 ..) footprint, and an extra location `aloc`
// collapses to the footprint of all levels from `lv` plus `aloc`. Proved by
// unfolding one step of the element-location recursion and reassociating.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: a `modifies l1` clause can always be widened to
// `modifies ((l1 u l2) u l3)` since the larger union includes l1.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence fact: after a snoc, the element at the original last position is
// still the original sequence's last element.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The rvector invariant implies its elements' region discipline on any
// sub-range [i, j). Discharged automatically by SMT.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// NOTE(review): verified Low* code — every ghost lemma call below, and the
// order of the hh0..hh4 heap snapshots, is load-bearing for the SMT proof.
// Do not reorder, merge, or remove any statement.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  // Odd `j`: the new element completes a pair, so hash the pair into `acc`
  // and recurse one level up; even `j`: insertion at this level is enough.
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                      ((Ghost.reveal hash_spec)
                        (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                        (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                        (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Pure insertion precondition on a dereferenced tree: the tree is not full and
// the 64-bit offset plus the incremented leaf count does not overflow.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful, public wrapper for the insertion precondition: dereferences the
// const tree pointer and runs the pure check.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// NOTE(review): verified Low* code — the framing lemma calls and their order
// around the two heap snapshots are load-bearing for the proof; do not reorder.
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Run the recursive insertion starting at level 0; `v` doubles as the
  // accumulator and is consumed in the process.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // Frame `rhs` and `mroot` against the insertion's footprint.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Write back the record with `j` incremented and `rhs_ok` cleared.
  mt *= MT (MT?.hash_size mtv)
          (MT?.offset mtv)
          (MT?.i mtv)
          (MT?.j mtv + 1ul)
          (MT?.hs mtv)
          false // `rhs` is always deprecated right after an insertion.
          (MT?.rhs mtv)
          (MT?.mroot mtv)
          (MT?.hash_spec mtv)
          (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Frame every component against the pointer update itself.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Public constructor: build an empty tree in region `r` with the given hash
// function, then insert the initial hash `init` (a valid tree holds at least
// one element).
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A (Merkle) path: a hash size together with a vector of hashes. The hashes
// point into the target tree's own storage, so the usual regional machinery
// does not apply (see the comment above); invariants are stated manually.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored behind a path pointer in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
// Path invariant: pointer and vector are live/freeable in an eternal region;
// every stored hash is valid and lives inside the tree's region `mtr`; and the
// path's own region is disjoint from `mtr` (the hashes are borrowed, not owned).
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
              HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Abstract footprint of a path: all regions under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence hs[i, j) of low-level hash buffers to a
// high-level path (a sequence of hash values), reading each buffer's
// contents in memory `h`.  Recursion peels elements off the right end.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts an entire low-level path to its high-level representation by
// lifting all hashes currently stored in its vector.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Index lemma for `lift_path_`: the k-th lifted element equals the
// representation of the k-th low-level hash (offset by `i`).
// Registered as an SMT pattern so the solver applies it automatically.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: reading slot `i` of the
// path's vector matches index `i` of the lifted path.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence lemma: if two hash sequences agree (as slices) on [i, j),
// their liftings over that range are equal.  Proven by spelling out the
// pointwise equalities so the SMT solver can chain them.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Frame lemma (sequence form): hashes in hs[i, j) that live inside `mtr`
// stay valid across a modification `dl` that is disjoint from `mtr`.
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
// frame the last element, then recurse on the prefix
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma: `path_safe` survives any modification disjoint from both
// the path's footprint and the tree's region.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Frame lemma for an *empty* path: since it holds no tree hashes, it
// stays safe under any modification disjoint from its own footprint
// (no disjointness from `mtr` required here).
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Representation-preservation lemma (sequence form): if the modification
// `dl` is disjoint from `mtr`, lifting hs[i, j) yields the same
// high-level path before and after.
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Representation-preservation lemma: a path's lifted value (and its
// hash size) is unchanged by modifications disjoint from both the
// path's footprint and the tree's region.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocates a fresh, empty path under region `r` (which must be
// disjoint from the tree region `mtr`); the hash vector itself lives
// in a new sub-region of `r`.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness: the new path lifts to the empty sequence
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to length zero.  Only the vector's size is cleared; the
// referenced hashes are owned by the tree and are not freed here.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees the path's vector and the path pointer itself.  The hashes the
// vector referenced belong to the tree and are untouched.
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Builds the vector of "rightmost hashes" for levels [lv, ...) of an
// incomplete tree, accumulating the Merkle root into `acc`.  `actd`
// records whether `acc` already holds a meaningful (active) value.
// Recurses level by level (j halves each step); mirrors the high-level
// `MTH.construct_rhs`, and the postcondition states that the low-level
// run refines it exactly.
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety: only `rhs` and `acc` are modified
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness: refines the high-level construct_rhs
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: empty level — nothing to do; discharge the spec equality.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even case: no leftover element at this level; recurse directly.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd case: one leftover element at this level.
else begin
if actd
then begin
// `acc` already active: save it as this level's rightmost hash,
// then fold the leftover element into `acc`.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// `acc` not yet active: initialize it with the leftover element.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
// Summarize both branches' effects, then recurse with actd = true.
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
// Non-stateful precondition check for `mt_get_root`; currently trivially
// true (kept for API symmetry with other `_pre_nst` checks).
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper around `mt_get_root_pre_nst`: dereferences the
// (const) tree pointer and runs the non-stateful check.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Returns the Merkle root in `rt`.  If the cached rightmost hashes are
// up to date (`rhs_ok`), the stored root is copied out; otherwise
// `construct_rhs` rebuilds them, the new root is cached in the tree,
// and `rhs_ok` is set.  Refines `MTH.mt_get_root`.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
// Fast path: the cached root is valid — just copy it out.
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
// Slow path: rebuild the rightmost hashes and the root into `rt`.
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly-computed root in the tree.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Mark the rightmost hashes as valid (rhs_ok = true).
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
// Appends a tree hash `hp` (which lives inside `mtr`) to the path `p`.
// Only path memory is modified; refines `MTH.path_insert`.
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// Insert may reallocate the vector; frame all tree hashes across it.
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
// Store the (possibly new) vector back into the path.
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
// Number of path hashes contributed by a single tree level: 0 or 1,
// depending on the target index `k`, the level width `j`, and whether
// the accumulator is active (`actd`).  Refines the high-level spec.
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
// left child: no sibling when k is the last slot (or next-to-last
// with an inactive accumulator)
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
// Total Merkle path length for target `k` among `j` elements, summing
// `mt_path_length_step` over the levels from `lv` upward (k, j halve
// each step).  Bounded by 32 - lv; refines `MTH.mt_path_length`.
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the number of hashes currently stored in the (const) path.
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
// One level of path construction: pushes onto `p` the sibling of the
// target index `k` at level `lv` — taken from `hs` for an ordinary
// sibling, or from `rhs` when the sibling is a cached rightmost hash.
// May push nothing (see `mt_path_length_step`).
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
// k is a right child: its sibling is the hash to its left in `hs`.
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
// k is a left child.
if k = j then ()
else if k + 1ul = j
// sibling is beyond the stored elements: use the cached rightmost
// hash (only meaningful when the accumulator is active)
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
// ordinary right sibling stored in `hs`
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_path_step`: the requested
// index must be within the path's current length.
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
// Stateful wrapper: dereferences the const path and runs the
// non-stateful bounds check.
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the i-th hash of the path (a pointer into the tree's memory;
// the caller must not free it).
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
// Recursive core of path construction: starting at level `lv`, pushes
// one sibling per level (via `mt_make_path_step`) while halving i, j, k
// until the level is empty.  Refines `MTH.mt_get_path_`.
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
// Frame the tree's invariants across the path modification.
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
// Recurse one level up; accumulator becomes active on odd widths.
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_path`: the external offset
// must map into this tree's window, hash sizes must agree, the target
// index must be a stored element, and the output path must be empty.
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_get_path_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mt: MerkleTree.Low.const_mt_p ->
idx: MerkleTree.Low.offset_t ->
p: MerkleTree.Low.const_path_p ->
root: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 38,
"end_line": 2016,
"start_col": 38,
"start_line": 2012
} |
FStar.HyperStack.ST.ST | val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt | val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt = | true | null | false | let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_get_root_pre_nst",
"Prims.unit",
"Prims._assert",
"Prims.b2t",
"Prims.op_Equality",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_get_root_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.const_mt_p -> rt: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 27,
"end_line": 1532,
"start_col": 32,
"start_line": 1527
} |
FStar.HyperStack.ST.ST | val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul) | val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r = | true | null | false | let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"FStar.HyperStack.ST.erid",
"LowStar.Buffer.malloc",
"MerkleTree.Low.path",
"FStar.UInt32.__uint_to_t",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.v",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"MerkleTree.Low.Path",
"LowStar.Vector.vector",
"MerkleTree.Low.Datastructures.hash",
"LowStar.Regional.rg_alloc",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.path_p",
"FStar.HyperStack.ST.new_region"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Pure (non-stateful) precondition for inserting one hash: the tree must not
// be full (`j < 2^32 - 1`) and the absolute leaf index `offset + (j + 1)` must
// still fit in 64 bits. `v` is taken only so the signature mirrors `mt_insert`.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the const tree
// pointer and checks the insertion precondition on the current tree value.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
// Trivial assertion; nudges the typechecker to relate the dereferenced
// tree's hash size with the implicit `hsz` carried by `v`.
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
// `mt_insert mt v` inserts hash `v` into tree `mt`. Per the file comment above,
// `v` is also consumed as an accumulator during insertion, so the `modifies`
// clause covers both the tree's regions and `v`'s region. Correctness is stated
// against the high-level spec `MTH.mt_insert` via `mt_lift`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
// Step 1: do the actual insertion into the hash store, starting at level 0.
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Step 2: show that `rhs` and `mroot` were untouched by `insert_`, since the
// modified locations (elements of `hs` plus `v`'s region) are disjoint from them.
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
// Step 3: write back the tree record with `j` bumped by one.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// Step 4: the write above touched only the tree pointer's buffer, so all
// sub-structures and their representations are preserved from hh1 to hh2.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree in region `r` seeded with the single hash `init`,
// using a caller-supplied hash function (and its ghost spec). Implemented as
// `create_empty_mt` followed by one `mt_insert`; note `init`'s region is
// modified because `mt_insert` uses it as an accumulator.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: a vector of hashes, tagged with the hash size so that the
// element type `hash #hash_size` is well-formed.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
// Mutable and const pointers to a path, mirroring `mt_p` / `const_mt_p`.
type path_p = B.pointer path
type const_path_p = const_pointer path
// Projects the hash vector out of the path stored at `p` in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory-safety invariant for a path w.r.t. the tree region `mtr`: the path
// pointer and its vector are live and freeable, every hash element is valid
// and lives inside `mtr` (path elements point into the tree's hash store),
// and the path's own region is disjoint from `mtr`. This is stated manually
// because the regionality framework cannot be used here (see comment above).
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// The footprint of a path: everything under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the slice [i, j) of a low-level hash sequence to a high-level path,
// reading each hash's representation in memory `h`. Defined by recursion on
// `j`, snoc-ing the representation of element `j - 1` onto the lifted prefix.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts the entire path pointed to by `p` (all elements of its hash vector).
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Indexing commutes with lifting: the k-th element of the lifted slice is the
// representation of `hs.[k]`. Registered as an SMT pattern so the solver can
// rewrite `S.index (lift_path_ ...)` occurrences automatically.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on j, peeling off the last element until k is reached.
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Same property stated for a whole path pointer instead of a raw sequence.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Lifting is determined by the slice contents: if two sequences agree on
// [i, j), their lifted paths over that range are equal. Proved by stating the
// pointwise characterization of both lifts (via `lift_path_index_`'s SMT
// pattern) and letting the solver connect them through the equal slices.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Element-wise preservation: if a modification `dl` is disjoint from all
// regions under `mtr`, then every hash in hs[i, j) — which lives inside
// `mtr` — keeps its invariant and region across the modification.
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
// Induction on j: each element's region is included in mtr, hence disjoint
// from dl, so `Rgl?.r_sep` preserves its invariant.
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` is stable under modifications disjoint from both the path's
// own footprint and the tree region `mtr`.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for a freshly initialized (empty) path: safety and emptiness
// are preserved by any modification disjoint from the path's footprint. No
// disjointness from `mtr` is needed here since the path holds no tree hashes.
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Representation preservation for a raw slice: under the same disjointness
// hypotheses as `path_safe_preserved_`, the lifted path over hs[i, j) is
// unchanged from h0 to h1 (each element's `r_repr` is stable via `r_sep`).
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on j, mirroring the structure of `lift_path_`.
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// A modification disjoint from the path and the tree region leaves both the
// path's hash size and its lifted (high-level) representation unchanged.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\ | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty)) | [] | MerkleTree.Low.init_path | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
hsz: MerkleTree.Low.Datastructures.hash_size_t ->
mtr: FStar.Monotonic.HyperHeap.rid ->
r: FStar.HyperStack.ST.erid
-> FStar.HyperStack.ST.ST MerkleTree.Low.path_p | {
"end_col": 57,
"end_line": 1278,
"start_col": 25,
"start_line": 1276
} |
Prims.Tot | val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd}) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul) | val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd = | false | null | false | if j = 0ul
then 0ul
else (if k % 2ul = 0ul then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul) else 1ul) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"total"
] | [
"MerkleTree.Low.index_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"Prims.bool",
"Prims.op_Equality",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Percent",
"Prims.op_BarBar",
"Prims.op_AmpAmp",
"FStar.Integers.op_Plus",
"Prims.op_Negation",
"LowStar.Vector.uint32_t",
"Prims.int",
"Prims.l_or",
"FStar.UInt.size",
"FStar.UInt32.n",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt32.v",
"MerkleTree.New.High.mt_path_length_step"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
// Lifts a Merkle-tree pointer to the high-level representation by
// dereferencing it (ghostly) and delegating to `merkle_tree_lift`.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing lemma: modifications disjoint from `mt_loc mt` preserve both the
// safety invariant and the lifted (high-level) value of the tree.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  // The tree pointer itself lies inside `mt_loc mt`, so it is untouched ...
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // ... and so are its three components (hs, rhs, mroot), whose
  // representations are therefore preserved.
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Allocates an empty Merkle tree in region `r`: fresh sub-regions for the
// level vectors (`hs`), the cached right-hand hashes (`rhs`), and the root
// (`mroot`). The resulting tree lifts to `MTH.create_empty_mt`.
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Level vectors: one vector per level, allocated in a fresh sub-region.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Cached right-hand hashes.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // `rhs` allocation does not disturb `hs`.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Merkle-tree root buffer.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // The tree record itself: offset 0, empty index range, rhs not yet valid.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)
// Frees a Merkle tree: the two regional vectors, the root hash, and finally
// the tree record itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
private
// Sequence lemma: updating index `i` of an rvector's representation is the
// same as stitching together the prefix [0, i), the new value `v`, and the
// suffix (i, size). Used to reason about `RV.assign` in insertion below.
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                         (S.append
                           (RV.as_seq_sub h rv 0ul i)
                           (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slices outside the updated index are unchanged by the update ...
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // ... and those slices coincide with `as_seq_sub` of the same ranges.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
// Copies hash `v` and pushes the copy onto the level-`lv` vector of `hs`.
// The bulk of the body is proof work: re-establishing `rv_inv`,
// `mt_safe_elts`, and the sequence-level correctness equations after the
// two heap updates (`RV.insert_copy` then `RV.assign`).
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic helper: when `j` is even, inserting one element does not change
// the parent-level index (`j / 2 == (j + 1) / 2`). Discharged by SMT.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic helper: when `j` is odd, the parent-level index grows by one
// (`(j + 1) / 2 == j / 2 + 1`) and stays in bounds for level `lv + 1`.
// Discharged by SMT.
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Rearranges a four-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Proved by chaining the binary associativity lemma (union commutativity is
// implicit in `loc_union`'s equational theory).
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Collapses the `modifies` footprint of one level of insertion (level `lv`
// plus the recursive footprint for levels above it) into the single
// footprint stated in `insert_`'s postcondition. Pure `loc_union` algebra.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both vector-range locations at `lv + 1` ...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening lemma for `modifies`: anything modifying `l1` also modifies the
// larger footprint `(l1 ∪ l2) ∪ l3`.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// Sequence helper: after `snoc`, the element at the old last position is the
// old last element. Discharged by SMT.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// `rv_inv` subsumes the element-region property on any sub-range.
// Discharged by SMT.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Core recursive insertion: pushes `acc` onto level `lv`, and when the level
// held an odd number of elements, hashes the last pair into `acc` and
// recurses one level up. Matches the high-level `MTH.insert_` (see the
// correctness clause in the `ensures`). The body interleaves the three real
// operations (copy-insert, hash, recurse) with the framing/invariant proofs
// needed at each intermediate heap.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (// Odd case: compress the last two hashes of level `lv` into `acc`
        // and keep inserting upwards.
        insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (// Even case: the level insert performed by `hash_vv_insert_copy` is
        // all that is needed; only proof obligations remain.
        insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and the global
// index `offset + j + 1` still fits in 64 bits.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and runs the pure check.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Public insertion: runs `insert_` from level 0 and then writes back the tree
// record with `j` incremented and `rhs_ok` cleared (the cached right-hand
// hashes are stale after any insertion). NOTE: `v` is consumed as the
// accumulator, so its contents are modified by this call.
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `insert_` only touched `hs` and `v`'s region: `rhs` and `mroot`
  // keep their invariants and representations.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Commit the new tree record (j incremented, rhs invalidated).
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the record only modifies the tree pointer itself.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Implemented as `create_empty_mt` followed by `mt_insert` of `init`
// (so `init` is consumed as the accumulator and gets modified).
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size together with a vector of hash pointers. The hashes
// point into the source Merkle tree, so paths cannot use the `regional`
// machinery (elements live in many distinct regions) and get hand-rolled
// invariants (`path_safe`) and representation (`lift_path`) below.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path

private
// Ghost accessor: the hash vector stored behind a path pointer.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable; every hash in the path is valid and lives inside
// the tree region `mtr`; and the path's own region is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Abstract footprint of a path: everything under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the slice [i, j) of a sequence of hash pointers to a high-level
// path (a sequence of hash values), recursing on `j` and appending the
// representation of the last element.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lifts the entire hash vector of `p`.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: element `k` of the slice lifts
// to position `k - i`. Registered as an SMT pattern so indexing facts fire
// automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: the `i`-th hash pointer of a
// path lifts to the `i`-th element of the lifted path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence for `lift_path_`: two pointer sequences equal on [i, j) lift to
// equal high-level paths. Proved by exposing the pointwise characterization
// on both sides and reconciling indexing offsets.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Framing for the per-element part of `path_safe`: modifications disjoint
// from the tree region `mtr` preserve validity and region membership of
// every hash in the slice [i, j). Recurses from the tail.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (// Each hash's region is under `mtr`, hence disjoint from `dl`.
        assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Framing for `path_safe`: modifications disjoint from both the path's own
// footprint and the tree region preserve the whole invariant.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: no element obligations remain, so only
// disjointness from the path's own footprint is required (not from `mtr`).
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Framing for `lift_path_`: modifications disjoint from the tree region
// leave the lifted value of the slice [i, j) unchanged. Recurses from the
// tail, re-establishing the element invariants as it goes.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Whole-path version of `path_preserved_`: after a disjoint modification the
// lifted path (and its hash size) is unchanged between h0 and h1.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocates a fresh, empty Merkle path in a new sub-region of `r`.
// `mtr` (the tree's region) must be disjoint from `r` so path and tree
// footprints never alias; the result lifts to the empty high-level path.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
// The hash vector lives in its own sub-region; the pointer itself in `r`.
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Empties a path in place (size becomes 0) without freeing the underlying
// vector storage; the hash size recorded in the path is kept.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocates a path: first the hash vector, then the pointer cell itself.
// Requires the vector's region to extend the pointer's frame so both fall
// inside `path_loc p`.
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
// Builds the "rightmost hashes" (`rhs`) for the incomplete tree levels from
// `lv` upwards, accumulating the Merkle root into `acc`. `actd` tracks
// whether `acc` already holds an active partial hash. The ensures-clause ties
// the result to the high-level spec `MTH.construct_rhs`.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: no elements at this level — nothing to fold in.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even case: level is complete; recurse directly to the next level.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd case: the level has a dangling rightmost element.
else begin
if actd
then begin
// `acc` is active: save it into rhs[lv], then fold the dangling
// element into `acc` with the hash function.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// `acc` inactive: the dangling element itself becomes the accumulator.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
// Recurse with `actd = true`: the accumulator is now active.
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
// Runtime precondition check for `mt_get_root`; trivially true (the real
// preconditions are static, carried by the ST contract of the caller).
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Public wrapper exposing the (trivial) runtime precondition of
// `mt_get_root` over a const tree pointer.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Returns the Merkle root in `rt`. If `rhs_ok` is set the cached root
// (`mroot`) is copied out directly; otherwise the rightmost hashes and the
// root are (re)computed via `construct_rhs`, cached, and `rhs_ok` is set.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
// Fast path: the cached root is up to date — just copy it out.
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
// Slow path: rebuild rightmost hashes and the root from level 0.
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly computed root in `mroot`.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Flip `rhs_ok` to true; all other fields are unchanged.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// Appends one hash (a pointer into the tree's regions under `mtr`) to a
// Merkle path; the path grows by exactly one and lifts to
// `MTH.path_insert` of the old lifted path.
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// Insert only copies the pointer `hp`, not the hash contents.
let ipv = V.insert pv hp in
let hh1 = HST.get () in
// Frame the old hashes and `hp` against the vector (re)allocation.
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
// Frame again against the update of the path pointer cell.
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd}) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd}) | [] | MerkleTree.Low.mt_path_length_step | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | k: MerkleTree.Low.index_t -> j: MerkleTree.Low.index_t{k <= j} -> actd: Prims.bool
-> sl:
LowStar.Vector.uint32_t
{ FStar.UInt32.v sl =
MerkleTree.New.High.mt_path_length_step (FStar.UInt32.v k) (FStar.UInt32.v j) actd } | {
"end_col": 16,
"end_line": 1758,
"start_col": 2,
"start_line": 1755
} |
FStar.HyperStack.ST.ST | val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv)) | val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p = | true | null | false | let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.Path",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Vector.clear",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hashes",
"Prims.unit",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
// In-tree indices are 32-bit; global element positions are 64-bit offsets.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
// A tree never spans more than 2^32-1 elements past its base offset.
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
// True iff `y` lies at or after `x` and within 32-bit range of it.
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
// Converts a global 64-bit offset into a 32-bit in-tree index (safe by the
// `offsets_connect` refinement on `index`).
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
// True iff `x + i` does not overflow 64 bits.
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
// Inverse of `split_offset`: turns an in-tree index back into a global offset.
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
// Low-level Merkle tree record; see the comment block above for the meaning
// of `hs`, `rhs`/`rhs_ok` and `mroot`. `hash_spec` is the ghost (spec-level)
// hash function mirrored at runtime by `hash_fun`.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
// A tree is handled through a (possibly const) pointer to the record.
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
// Runtime check of the structural invariants baked into the `MT` constructor:
// index ordering, offset-addition fitting in 64 bits, and vector sizes.
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
// The tree is full once `j` reaches 2^32 - 1 (32-bit vector limitation).
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
// Rounds an index down to the even start of its sibling pair at this level.
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
// Accessibility invariant: at each level from `lv` up, hs[lv] holds exactly
// the hashes for indices `i..j-1` (shifted by `offset_of i`), with `i`/`j`
// halving per level.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction rule for `mt_safe_elts`: level `lv` plus the invariant for
// the next level gives the invariant at `lv`.
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination rule: extract the size equation at level `lv`.
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination rule: step the invariant to the next level.
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// A freshly created tree (all level vectors empty) satisfies the invariant
// with i = j = 0.
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma for `mt_safe_elts`: the invariant survives any modification
// disjoint from the level vectors. Registered as an SMT pattern so the
// prover applies it automatically.
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
// Full lifetime invariant of a tree: liveness of every component, the
// element-accessibility invariant, and region containment/disjointness of
// `hs`, `rhs` and `mroot` under the tree pointer's frame.
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for `mt_safe`: a modification disjoint from the whole tree
// footprint preserves both the invariant and the tree record itself.
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
// Establish that each component's footprint is inside `mt_loc mt`, then
// invoke each component's own framing lemma.
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
// Bridges the low-level invariant `mt_safe_elts` (on vectors in memory) to
// the high-level well-formedness predicate `MTH.hs_wf_elts` (on the lifted
// sequence-of-sequences).  Proof is by the same level-wise induction as the
// definitions themselves.
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Lifts a low-level `merkle_tree` value to its high-level (ghost) model
// `MTH.merkle_tree`, field by field, converting machine integers with
// `U32.v` and data structures with `RV.as_seq` / `Rgl?.r_repr`.
// `mt_safe_elts_spec` supplies the `MTH.mt_wf_elts` refinement on the result.
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
// Convenience wrapper: lift the tree a pointer refers to by dereferencing it
// in `h` and applying `merkle_tree_lift`; `mt_safe` discharges its precondition.
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
// Framing lemma for the lifted view: a modification disjoint from `mt_loc mt`
// leaves the high-level model `mt_lift` unchanged.  (The `ensures` clause
// first invokes `mt_safe_preserved` so that `mt_lift h1 mt` is well-defined.)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
// Each lifted component is preserved by the corresponding sequence lemma.
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
// Allocates an empty Merkle tree under region `r`: three fresh sub-regions
// hold the hash vectors `hs`, the cached right-most hashes `rhs`, and the
// Merkle root `mroot`; the tree record itself is malloc'd in `r`.
// Each preservation lemma re-establishes the invariants of the structures
// allocated earlier across the subsequent allocations.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
// Allocating `rhs` only touched its own (fresh) region, so `hs` survives.
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
// Initial tree: offset 0, i = j = 0, `rhs_ok` deliberately false.
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
// Frees a Merkle tree: releases the two rvectors, the root hash, and finally
// the tree record itself.  Requires `mt_safe` (which implies freeability of
// every component); afterwards only `mt_loc mt` has been modified.
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
// Sequence algebra helper: updating index `i` of an rvector's lifted
// sequence equals splicing the new value `v` between the prefix [0, i) and
// the suffix [i+1, size).  Used by `hash_vv_insert_copy` to relate
// `RV.assign` to `S.upd` on the high-level view.
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
// Slices on either side of `i` are unaffected by the update...
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
// ...and each slice coincides with the corresponding `as_seq_sub`.
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// Appends a copy of hash `v` to the level-`lv` vector of `hs`.
// Two steps: (1) `RV.insert_copy` builds a new vector `ihv` extending
// `hs[lv]` with a copy of `v`; (2) `RV.assign` installs `ihv` at `lv`.
// The interleaved lemma calls re-establish `rv_inv`, `mt_safe_elts` for the
// untouched tail levels, and the sequence-level correctness statements.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety: only `hs[lv]` and the vector cell at `lv` are touched
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness: lifted view equals the high-level `hashess_insert`
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper for the even-`j` branch of `insert_`: when `j` is even,
// appending one element does not change the parent index (`j / 2`).
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
// Arithmetic helper for the odd-`j` branch of `insert_`: appending bumps the
// parent index (`(j+1)/2 = j/2 + 1`), the parent index stays in bounds for
// level `lv + 1`, and the level-`lv` vector is non-empty.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Four-way reassociation of `loc_union`: (a ∪ b) ∪ (c ∪ d) = (a ∪ c) ∪ (b ∪ d),
// derived from the binary associativity/commutativity lemmas.
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
// Footprint algebra for `insert_`'s recursive case: the union of the
// level-`lv` footprint, the recursive footprint for levels above `lv`, and
// the accumulator location `aloc` collapses to the single footprint stated
// in `insert_`'s `modifies` postcondition.  Pure `loc_union` reshuffling.
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Unfold both range footprints into head-at-`lv` plus tail-from-`lv+1`.
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// `modifies` weakening: modifying only `l1` also counts as modifying the
// larger footprint `(l1 ∪ l2) ∪ l3`.  Used in `insert_`'s even branch, where
// no recursive call happens but the full footprint must still be reported.
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Trivial sequence fact: in `snoc s v`, the element at the old last
// position (`length s - 1`) is still `last s`.
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// `rv_inv` already implies the element-regionality sub-property for any
// sub-range [i, j); this lemma just exposes that implication by name.
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// Recursive insertion worker (see the diagram in the comment above).
// Appends `acc` at level `lv` via `hash_vv_insert_copy`; if `j` is odd the
// two right-most hashes at `lv` are compressed into `acc` and the function
// recurses on level `lv + 1` with halved indices, otherwise it stops.
// The proof tracks four machine states hh0..hh4 and shows the result lifts
// to the high-level `MTH.insert_`.
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
// Compress the second-to-last hash with `acc`, writing back into `acc`.
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and bumping the
// leaf count by one does not overflow the 64-bit `offset + j` sum.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and runs the pure check on the snapshot.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
// Public insertion: runs the recursive worker `insert_` from level 0, then
// stores an updated tree record with `j + 1` and `rhs_ok = false` (the
// cached right-most hashes are stale after any insertion).  The lemma calls
// between the two writes frame `rhs`, `mroot`, and `mt_safe_elts` across
// the insertion footprint and the record update, re-establishing `mt_safe`.
// NOTE: `v` is consumed as the accumulator, so its contents are mutated.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Frame `rhs` and `mroot` across the insertion footprint.
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// Writing the record only touches the tree pointer; frame everything else.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a tree with a user-supplied hash function and one initial leaf:
// allocates an empty tree in `r`, then inserts `init` (which is mutated, as
// `mt_insert` uses it as the accumulator).
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A (Merkle) path: a hash size paired with a vector of sibling hashes.
// `path_p` is a mutable pointer to one; `const_path_p` its read-only view.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path `p` as of memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Path invariant: the path pointer and its vector are live and freeable,
// every hash in the vector is valid and lives inside the tree region `mtr`,
// the vector's region extends the path's frame, and the path's frame is
// disjoint from `mtr` (path hashes point INTO the tree, so regionality for
// `path` cannot be derived generically — hence this manual invariant).
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Footprint of a path: all regions under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the slice [i, j) of a low-level hash sequence to a high-level
// `MTH.path`, taking the ghost representation of each hash; recursion peels
// elements from the right end.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts the whole path: applies `lift_path_` to the path's full hash vector.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: element `k` of the slice
// lifts to position `k - i`.  Registered as an `SMTPat` so the solver can
// reason about lifted paths element by element.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Same pointwise fact specialized to a whole path: the `i`-th stored hash
// lifts to index `i` of `lift_path`.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Extensionality: two hash sequences that agree on the slice [i, j) lift to
// equal paths.  Proven by exposing the pointwise characterization for both
// sequences and transporting the slice equality index by index.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Auxiliary induction for `path_safe_preserved`: each hash in [i, j) lives
// inside `mtr`, so a modification disjoint from `mtr`'s regions preserves
// its invariant (`Rgl?.r_sep`); recurse toward the left end.
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\ | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty)) | [] | MerkleTree.Low.clear_path | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.path_p -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 60,
"end_line": 1292,
"start_col": 22,
"start_line": 1290
} |
FStar.Pervasives.Lemma | val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i) | val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i = | false | null | true | lift_path_index_ h (V.as_seq h (phashes h p)) 0 (S.length (V.as_seq h (phashes h p))) (U32.v i) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.lift_path_index_",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"FStar.UInt32.v",
"Prims.unit"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i))) | [] | MerkleTree.Low.lift_path_index | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
i: LowStar.Vector.uint32_t
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h mtr p /\ i < LowStar.Vector.size_of (MerkleTree.Low.phashes h p))
(ensures
(let hsz = Path?.hash_size (LowStar.Monotonic.Buffer.get h p 0) in
Rgl?.r_repr (MerkleTree.Low.Datastructures.hreg hsz)
h
(LowStar.Vector.get h (MerkleTree.Low.phashes h p) i) ==
FStar.Seq.Base.index (MerkleTree.Low.lift_path h mtr p) (FStar.UInt32.v i))) | {
"end_col": 53,
"end_line": 1133,
"start_col": 2,
"start_line": 1132
} |
Prims.GTot | val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r}) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv)) | val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv = | false | null | false | mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.merkle_tree",
"Prims.l_and",
"LowStar.RVector.rv_inv",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__rhs",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.__proj__MT__item__mroot",
"MerkleTree.Low.mt_safe_elts",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.New.High.MT",
"FStar.UInt32.v",
"LowStar.RVector.as_seq",
"MerkleTree.Low.__proj__MT__item__rhs_ok",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"FStar.Ghost.reveal",
"MerkleTree.Spec.hash_fun_t",
"MerkleTree.Low.__proj__MT__item__hash_spec",
"Prims.unit",
"MerkleTree.Low.mt_safe_elts_spec",
"MerkleTree.New.High.merkle_tree",
"MerkleTree.New.High.mt_wf_elts"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r}) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r}) | [] | MerkleTree.Low.merkle_tree_lift | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
mtv:
MerkleTree.Low.merkle_tree
{ LowStar.RVector.rv_inv h (MT?.hs mtv) /\ LowStar.RVector.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
MerkleTree.Low.mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) }
-> Prims.GTot (r: MerkleTree.New.High.merkle_tree{MerkleTree.New.High.mt_wf_elts r}) | {
"end_col": 38,
"end_line": 293,
"start_col": 2,
"start_line": 285
} |
Prims.GTot | val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))}) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) | val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p = | false | null | false | lift_path_ h (V.as_seq h (phashes h p)) 0 (S.length (V.as_seq h (phashes h p))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial"
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"Prims.l_and",
"MerkleTree.Low.path_safe",
"Prims.b2t",
"Prims.op_Equality",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.lift_path_",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"MerkleTree.New.High.path",
"FStar.UInt32.v",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"MerkleTree.New.High.hash",
"LowStar.Vector.size_of"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Index arithmetic for the "odd" branch of `insert_`: when `j` is odd,
// appending one element carries into the next level — (j + 1) / 2 is one more
// than j / 2 — and j / 2 still fits the `pow2` bound at level lv + 1.
// The last conjunct (j - offset_of i > 0) guarantees the current level holds
// at least one hash, so `insert_` may read its last element for compression.
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Memory-location algebra: `loc_union` is associative and commutative, so the
// four-way union (a ∪ b) ∪ (c ∪ d) can be regrouped as (a ∪ c) ∪ (b ∪ d).
// Proved by chaining the binary associativity lemma `loc_union_assoc`; used by
// `insert_modifies_rec_helper` to reshuffle `modifies` footprints.
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
// Footprint lemma for the recursive case of `insert_`: the union of
//   (what level `lv` writes:  rs_loc_elem at lv ∪ the lv-th vector slot ∪ aloc)
// with
//   (what the recursive call on levels lv+1.. writes: rv_loc_elems ∪ the
//    remaining vector slots ∪ aloc)
// equals the single footprint claimed in `insert_`'s postcondition:
//   rv_loc_elems over [lv, size) ∪ loc_vector_within over [lv, size) ∪ aloc.
// `aloc` is the accumulator's region footprint, duplicated on both sides and
// collapsed by idempotence of `loc_union`.
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Unfold both range-footprints one step: [lv, size) = [lv, lv+1) ∪ [lv+1, size).
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening for `modifies` clauses: anything that modifies only `l1` also
// modifies the larger footprint (l1 ∪ l2) ∪ l3. Follows from the inclusion
// lemmas of LowStar.Buffer; used by the even branch of `insert_`, which does
// not touch the accumulator region or deeper levels but must still satisfy
// the full-footprint postcondition.
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence fact: in `snoc s v`, the element at position (length s - 1) —
// i.e. just before the appended `v` — is the last element of the original `s`.
// Lets `insert_`'s odd branch identify hs[lv][size-2] (after the copy-append)
// with `S.last` of the pre-insertion level, matching the high-level spec.
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The regional-vector invariant `rv_inv` already implies that the elements in
// any sub-range [i, j) live in their designated regions (`rv_elems_reg`).
// Discharged automatically by the SMT solver from the definition of `rv_inv`.
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// Workhorse of insertion. At each level `lv` it appends the accumulator `acc`
// to hs[lv] (done by `hash_vv_insert_copy` in the prologue). If the new count
// `j` was odd, the two rightmost hashes at this level are compressed with
// `hash_fun` into `acc` (note: `acc` is used in-place as both input and
// output) and the insertion recurses one level up; if even, insertion stops
// here. The postcondition ties the low-level state to the pure specification
// `MTH.insert_` and bounds the modified footprint to the touched levels of
// `hs` plus the accumulator's regions. Verified at z3rlimit 800 — the proof
// relies on the exact order of the numbered assert/lemma steps below; do not
// reorder them.
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
// 1)-2) Append `acc` to hs[lv] (copy + assign happen inside this helper).
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full (j < 2^32 - 1)
// and adding one more leaf does not overflow the 64-bit global offset.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the const tree
// pointer and runs the runtime precondition check. The hash-size parameter is
// ghost; the assert reminds the checker the dereferenced tree's size matches.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
// Top-level insertion. Runs `insert_` from level 0, then rebuilds the
// `merkle_tree` record with j incremented and `rhs_ok` reset to false (the
// cached rightmost hashes and root are stale after any insertion). The lemma
// calls between the two state snapshots re-establish the invariants of the
// parts NOT touched by `insert_` (rhs, mroot) and then again after the record
// write (which only modifies the tree pointer's own buffer).
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
// Shadow the ghost `hsz` with the concrete size stored in the tree.
let hsz = MT?.hash_size mtv in
// Note: `v` is consumed as the accumulator, so its contents are mutated.
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
// `rhs` and `mroot` are disjoint from `insert_`'s footprint: preserved.
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// The record write only touched the tree pointer: everything else preserved.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree in region `r` with a caller-supplied hash function
// (and its ghost specification), seeded with the single leaf `init`.
// Implemented as: allocate an empty tree, then `mt_insert` the initial hash —
// so `init` is mutated (it serves as the insertion accumulator).
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: the hash size paired with the vector of sibling hashes from
// leaf to root. `path_p` is a (mutable) pointer to a path; `const_path_p` is
// the read-only pointer flavor used by query functions.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored inside the path pointed to by `p`
// in memory state `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory-safety invariant for paths. The hashes in a path point into the
// Merkle tree's own regions (under `mtr`), not into copies, so regionality
// machinery cannot be reused; this invariant is maintained by hand: the path
// pointer and its vector are live/freeable in an eternal region extending the
// path's frame, every element satisfies the hash invariant and lives under
// `mtr`, and the path's frame is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Footprint of a path: every region under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-range [i, j) of a low-level hash sequence to the pure
// high-level path representation, element by element (recursing on j, so the
// result preserves order). Requires every element in the range to satisfy the
// hash invariant so `r_repr` is defined.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))}) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))}) | [] | MerkleTree.Low.lift_path | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p:
MerkleTree.Low.path_p
{ MerkleTree.Low.path_safe h mtr p /\
Path?.hash_size (LowStar.Monotonic.Buffer.get h p 0) = hsz }
-> Prims.GTot
(hp:
MerkleTree.New.High.path
{ FStar.Seq.Base.length hp =
FStar.UInt32.v (LowStar.Vector.size_of (MerkleTree.Low.phashes h p)) }) | {
"end_col": 43,
"end_line": 1103,
"start_col": 2,
"start_line": 1102
} |
FStar.Pervasives.Lemma | val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)] | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1) | val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 = | false | null | true | if lv = merkle_tree_size_lg
then ()
else
(V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.bool",
"MerkleTree.Low.mt_safe_elts_preserved",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Slash",
"Prims.unit",
"LowStar.Vector.get_preserved"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)] | [
"recursion"
] | MerkleTree.Low.mt_safe_elts_preserved | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{j >= i} ->
p: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.live h0 hs /\ MerkleTree.Low.mt_safe_elts h0 lv hs i j /\
LowStar.Monotonic.Buffer.loc_disjoint p
(LowStar.Vector.loc_vector_within hs lv (LowStar.Vector.size_of hs)) /\
LowStar.Monotonic.Buffer.modifies p h0 h1)
(ensures MerkleTree.Low.mt_safe_elts h1 lv hs i j)
(decreases 32 - FStar.UInt32.v lv)
[
SMTPat (LowStar.Vector.live h0 hs);
SMTPat (MerkleTree.Low.mt_safe_elts h0 lv hs i j);
SMTPat (LowStar.Monotonic.Buffer.loc_disjoint p (LowStar.RVector.loc_rvector hs));
SMTPat (LowStar.Monotonic.Buffer.modifies p h0 h1)
] | {
"end_col": 77,
"end_line": 205,
"start_col": 2,
"start_line": 203
} |
FStar.Pervasives.Lemma | val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)) | val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
let mt_flush_to_modifies_rec_helper #hsz lv hs h = | false | null | true | assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.loc_union_assoc_4",
"LowStar.RVector.rs_loc_elem",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.Vector.as_seq",
"FStar.UInt32.v",
"LowStar.Vector.loc_vector_within",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"LowStar.RVector.rv_loc_elems",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"LowStar.Monotonic.Buffer.loc",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rs_loc_elems_rec_inverse"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
// Footprint equation used to conclude the `modifies` postcondition in the
// recursive case of `insert_`: the union of the head-level footprint
// (level lv's elements and vector slot, plus `aloc`) with the tail-levels
// footprint (levels lv+1.. plus `aloc`) equals the footprint of all levels
// from lv plus `aloc`.  `aloc` is instantiated with the accumulator's regions.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the vector-slot footprint at lv + 1 ...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  // ... and the element footprint likewise.
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// `modifies` weakening: a modification within l1 is also a modification
// within the larger footprint (l1 U l2) U l3.  Used for the even (base)
// case of `insert_`, where only the head level is touched.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// The element at position (length s - 1) of `S.snoc s v` is the last
// element of the original sequence `s`.  Discharged by SMT.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The regional-vector invariant implies the per-element region property
// for any sub-range [i, j) of the vector.  Discharged by SMT.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0             h0  h1  h2       ====>  h0  h1  h2  h3
// 1             h01                     h01 h23
// 2                                     h03
//
private
// The accumulator `acc` initially holds the leaf being inserted; it is
// destructively updated (via `hash_fun`) to carry the compressed hash up
// the levels.  The spec-level counterpart is `MTH.insert_`.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
         (requires (fun h0 ->
           RV.rv_inv h0 hs /\
           Rgl?.r_inv (hreg hsz) h0 acc /\
           HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
           mt_safe_elts h0 lv hs (Ghost.reveal i) j))
         (ensures (fun h0 _ h1 ->
           // memory safety
           modifies (loc_union
                      (loc_union
                        (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                        (V.loc_vector_within hs lv (V.size_of hs)))
                      (B.loc_all_regions_from false (B.frameOf acc)))
                    h0 h1 /\
           RV.rv_inv h1 hs /\
           Rgl?.r_inv (hreg hsz) h1 acc /\
           mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
           // correctness
           (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
           S.equal (RV.as_seq h1 hs)
                   (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                     (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
         (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // 1) Snoc the accumulator to the current level.
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  // 2) Odd case: compress with the sibling and recurse to the next level.
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
              ((Ghost.reveal hash_spec)
                (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                  (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                  (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))

  // Even case: the snoc at the head level is all that is needed.
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                  (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
           (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
             (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and the
// 64-bit global index (offset + j + 1) does not overflow.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
         (requires (fun h0 ->
           let dmt = B.get h0 mt 0 in
           mt_safe h0 mt /\
           Rgl?.r_inv (hreg hsz) h0 v /\
           HH.disjoint (B.frameOf mt) (B.frameOf v) /\
           MT?.hash_size dmt = Ghost.reveal hsz /\
           mt_insert_pre_nst dmt v))
         (ensures (fun h0 _ h1 ->
           // memory safety
           modifies (loc_union
                      (mt_loc mt)
                      (B.loc_all_regions_from false (B.frameOf v)))
                    h0 h1 /\
           mt_safe h1 mt /\
           // correctness
           MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
           mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Insert starting from level 0; `v` is consumed as the accumulator.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` are untouched by `insert_`; propagate their
  // invariants and representations across the modification.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached rightmost hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree record only touches `mt`'s buffer; all sub-structures
  // are preserved.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Note: `init` is used as the insertion accumulator, so its content is
// manipulated by the call (see `mt_insert`).
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
  (requires (fun h0 ->
    Rgl?.r_inv (hreg hsz) h0 init /\
    HH.disjoint r (B.frameOf init)))
  (ensures (fun h0 mt h1 ->
    // memory safety
    modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = hsz /\
    mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path

private
// Ghost accessor for the hash vector stored in a path pointer.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its hash
// vector are live and freeable, every hash in the path lives inside the
// tree region `mtr`, the vector's region extends the pointer's frame, and
// the path is disjoint from the tree.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Abstract footprint of a path: all regions descending from its frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence [i, j) of low-level hashes to a high-level
// (spec) path, preserving order (structural recursion on j).
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole low-level hash vector of `p`
// to a high-level (spec) path.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th lifted element is
// the representation of the k-th low-level hash.  Registered as an SMT
// pattern so indexing facts are available automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Corollary of `lift_path_index_` for whole paths: indexing the lifted
// path agrees with representing the hash at the same vector position.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Extensionality of `lift_path_`: two hash sequences that agree on the
// slice [i, j) lift to equal high-level paths on that range.  Proved by
// exposing the pointwise characterization on both sides.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// If every hash in hs[i, j) lives inside `mtr` and the modification `dl`
// is disjoint from `mtr`'s regions, each hash invariant survives the
// modification.  Recursion peels off the last element.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// The path invariant is framed by any modification disjoint from both the
// path's own footprint and the tree's regions.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: since the path holds no hashes, only
// disjointness from the path's own footprint is needed (no disjointness
// from the tree regions is required).
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// The lifted value of hs[i, j) is unchanged by a modification disjoint
// from the tree regions containing the hashes (recursion on j, framing
// each hash with Rgl?.r_sep).
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Corollary for whole paths: a modification disjoint from both the path
// and the tree regions preserves the lifted (spec-level) path, including
// its hash size.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in a new sub-region of `r`.  `r` must be
// disjoint from the tree region so the path invariant can hold.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to the empty path.  Only the vector's logical size is
// cleared; the hash size is carried over unchanged and the storage is kept.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let cur = !*p in
  p *= Path (Path?.hash_size cur) (V.clear (Path?.hashes cur))
// Releases all memory owned by a path: first the underlying hash vector,
// then the path pointer itself.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pth = !*p in
  V.free (Path?.hashes pth);
  B.free p
/// Getting the Merkle root and path

// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
// `actd` records whether `acc` already holds an "active" partial hash:
// when `j` is odd at some level, the rightmost hash there is folded into
// `acc` (copied if `actd` is false, hashed in if true) and, when active,
// also stored into `rhs` at that level.
private
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
         (requires (fun h0 ->
           RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
           HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
           Rgl?.r_inv (hreg hsz) h0 acc /\
           HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
           HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
           mt_safe_elts #hsz h0 lv hs i j))
         (ensures (fun h0 _ h1 ->
           // memory safety
           modifies (loc_union
                      (RV.loc_rvector rhs)
                      (B.loc_all_regions_from false (B.frameOf acc)))
                    h0 h1 /\
           RV.rv_inv h1 rhs /\
           Rgl?.r_inv (hreg hsz) h1 acc /\
           // correctness
           (mt_safe_elts_spec #hsz h0 lv hs i j;
           MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) h0 hs)
             (Rgl?.r_repr (hvreg hsz) h0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
           )))
         (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in

  // Base case: no elements at this level; `rhs` and `acc` stay as they are.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    // Even case: nothing rightmost at this level; recurse directly.
    (if j % 2ul = 0ul
    then begin
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end

    // Odd case: fold the rightmost hash of this level into `acc`.
    else begin
      if actd
      then begin
        // `acc` already active: store it into `rhs.[lv]` and compress it
        // with the level's last hash.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);

        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                        (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                               (Rgl?.r_repr (hreg hsz) hh0 acc)));

        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;

        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // `acc` not yet active: initialize it by copying the level's last
        // hash; `rhs` is untouched.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;

        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;

      // Summarize both branches, then recurse with `actd = true`.
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                      (if actd
                      then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                 (Rgl?.r_repr (hreg hsz) hh0 acc)
                      else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));

      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_root`; currently always true
// (kept as a hook so additional checks can be added without changing callers).
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper over `mt_get_root_pre_nst`: dereferences the (const)
// tree pointer and checks the root-computation precondition.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
   (requires (fun h0 ->
     let mt = CB.cast mt in
     MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// On return the tree's `rhs_ok` flag is set, so subsequent calls take the
// fast (cached) branch until the next insertion invalidates it.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs))) | [] | MerkleTree.Low.mt_flush_to_modifies_rec_helper | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv < MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rs_loc_elem
(MerkleTree.Low.Datastructures.hvreg hsz)
(LowStar.Vector.as_seq h hs)
(FStar.UInt32.v lv))
(LowStar.Vector.loc_vector_within hs lv (lv + 1ul)))
(LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rv_loc_elems h
hs
(lv + 1ul)
(LowStar.Vector.size_of hs))
(LowStar.Vector.loc_vector_within hs (lv + 1ul) (LowStar.Vector.size_of hs))) ==
LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rv_loc_elems h
hs
lv
(LowStar.Vector.size_of hs))
(LowStar.Vector.loc_vector_within hs lv (LowStar.Vector.size_of hs))) | {
"end_col": 54,
"end_line": 2195,
"start_col": 2,
"start_line": 2184
} |
Prims.Tot | val offsets_connect (x y: offset_t) : Tot bool | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit | val offsets_connect (x y: offset_t) : Tot bool
let offsets_connect (x y: offset_t) : Tot bool = | false | null | false | y >= x && (y - x) <= offset_range_limit | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"total"
] | [
"MerkleTree.Low.offset_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Greater_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W64",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.offset_range_limit",
"Prims.bool"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32 | false | true | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val offsets_connect (x y: offset_t) : Tot bool | [] | MerkleTree.Low.offsets_connect | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | x: MerkleTree.Low.offset_t -> y: MerkleTree.Low.offset_t -> Prims.bool | {
"end_col": 97,
"end_line": 62,
"start_col": 58,
"start_line": 62
} |
Prims.GTot | val mt_not_full: HS.mem -> mt_p -> GTot bool | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_not_full h mt = mt_not_full_nst (B.get h mt 0) | val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = | false | null | false | mt_not_full_nst (B.get h mt 0) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_p",
"MerkleTree.Low.mt_not_full_nst",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.bool"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_not_full: HS.mem -> mt_p -> GTot bool | [] | MerkleTree.Low.mt_not_full | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | h: FStar.Monotonic.HyperStack.mem -> mt: MerkleTree.Low.mt_p -> Prims.GTot Prims.bool | {
"end_col": 53,
"end_line": 120,
"start_col": 23,
"start_line": 120
} |
FStar.Pervasives.Lemma | val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1) | val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 = | false | null | true | if i = j
then ()
else
(path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes (B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.op_Equality",
"Prims.bool",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.index",
"FStar.Integers.op_Subtraction",
"Prims.unit",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Regional.__proj__Rgl__item__region_of",
"MerkleTree.Low.path_preserved_",
"MerkleTree.Low.path_safe_preserved_"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j) | [
"recursion"
] | MerkleTree.Low.path_preserved_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtr: FStar.Monotonic.HyperHeap.rid ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat{i <= j && j <= FStar.Seq.Base.length hs} ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_seq hs
i
j
(fun hp ->
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h0 hp /\
FStar.Monotonic.HyperHeap.includes mtr
(Rgl?.region_of (MerkleTree.Low.Datastructures.hreg hsz) hp)) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
([@@ FStar.Pervasives.inline_let ]let _ =
MerkleTree.Low.path_safe_preserved_ mtr hs i j dl h0 h1
in
FStar.Seq.Base.equal (MerkleTree.Low.lift_path_ h0 hs i j)
(MerkleTree.Low.lift_path_ h1 hs i j)))
(decreases j) | {
"end_col": 59,
"end_line": 1242,
"start_col": 2,
"start_line": 1235
} |
Prims.Tot | val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_root_pre_nst mtv rt = true | val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = | false | null | false | true | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"total"
] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.bool"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate some merkle paths that need the rightmost hashes
//        as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
//          root of the tree. If `rhs_ok` is true then it has the up-to-date
//          root value.
// Additionally:
// `offset`: 64-bit base offset of the tree; `i`/`j` are 32-bit indices
//           relative to it (see `split_offset`/`join_offset`).
// `hash_spec`/`hash_fun`: the erased functional two-to-one hash specification
//           and the concrete implementation refined by it.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree

// A mutable Merkle tree is manipulated through a heap pointer to the record;
// `const_mt_p` is the read-only (const pointer) view used by query functions.
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
// Run-time checkable well-formedness of the components of a `merkle_tree`:
// index ordering, no offset overflow, and full-height hash stores. `rhs_ok`
// and `mroot` are taken for uniformity but impose no boolean condition here.
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
  j >= i && add64_fits offset j &&
  V.size_of hs = merkle_tree_size_lg &&
  V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
// True when one more element can still be inserted (checked on the plain record).
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
// Heap-level variant: reads the tree record out of memory `h` first.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety

// `offset_of i` rounds `i` down to the nearest even index; at each level only
// hashes from this rounded index onward are kept in the level's vector.
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
// Defined by recursion over the remaining levels: at level `lv` the vector
// holds exactly `j - offset_of i` hashes, and indices are halved going up.
inline_for_extraction noextract
val mt_safe_elts:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
  if lv = merkle_tree_size_lg then true
  else (let ofs = offset_of i in
       V.size_of (V.get h hs lv) == j - ofs /\
       mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction rule for `mt_safe_elts`: folds one unrolling of the recursion.
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination rule (head): extracts the size equation of the current level.
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination rule (tail): steps the invariant up one level (indices halved).
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// Base case: a tree whose level vectors are all empty satisfies the invariant
// for i = j = 0. Proved by recursion over the levels.
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                  (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` is preserved across any modification that is
// disjoint from the vector structure of `hs`. Registered as an SMT pattern so
// Z3 applies it automatically during stateful proofs below.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: each sub-structure lives in its own sub-region of the
  // tree's region, and the three sub-regions are pairwise disjoint.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the whole-tree invariant: modifications disjoint from
// `mt_loc mt` keep the tree record unchanged and `mt_safe` intact.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // Show each component's footprint is included in `mt_loc mt`, then invoke
  // the per-component preservation lemmas.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
         (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// Bridges the low-level invariant `mt_safe_elts` to the high-level (spec)
// well-formedness predicate `MTH.hs_wf_elts` over the lifted sequences.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lift of a low-level tree record to its high-level specification
// counterpart `MTH.merkle_tree`, reading each component's representation in `h`.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to discharge the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Pointer-level lift: dereference `mt` in `h` and lift the record.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Strengthening of `mt_safe_preserved`: disjoint modifications preserve not
// only the invariant but also the lifted (specification-level) value.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  // Each component's representation is unchanged, hence the lift is equal.
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
  (requires (fun _ -> true))
  (ensures (fun h0 mt h1 ->
    let dmt = B.get h1 mt 0 in
    // memory safety
    B.frameOf mt = r /\
    modifies (mt_loc mt) h0 h1 /\
    mt_safe h1 mt /\
    mt_not_full h1 mt /\
    // correctness
    MT?.hash_size dmt = hash_size /\
    MT?.offset dmt = 0UL /\
    merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
// Allocates each sub-structure (hash store, rightmost hashes, root) in its own
// fresh sub-region of `r` -- this establishes the disjointness clauses of
// `mt_safe` -- then allocates the tree record itself in `r`.
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // 2-dim hash store, all levels empty.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Rightmost-hash store, initialized to default hashes.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Merkle root cell.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // The tree record: offset 0, i = j = 0, rhs not yet computed.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees the three sub-structures (hash store, rightmost hashes, root) and
// finally the tree record itself.
val mt_free: mt:mt_p ->
  HST.ST unit
  (requires (fun h0 -> mt_safe h0 mt))
  (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence lemma: updating index `i` of an rvector's representation equals
// gluing the prefix [0, i), the new value, and the suffix (i, size).
// Used by `hash_vv_insert_copy` to relate `RV.assign` to a `S.upd`.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// The postcondition pins down the exact footprint (the level-`lv` vector plus
// the slot of `hs` holding it), the new size of the level, and the resulting
// sequence both globally (`MTH.hashess_insert`) and per level (`S.snoc`).
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
  (requires (fun h0 ->
    RV.rv_inv h0 hs /\
    Rgl?.r_inv (hreg hsz) h0 v /\
    HH.disjoint (V.frameOf hs) (B.frameOf v) /\
    mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
  (ensures (fun h0 _ h1 ->
    // memory safety
    modifies (loc_union
               (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
               (V.loc_vector_within hs lv (lv + 1ul)))
             h0 h1 /\
    RV.rv_inv h1 hs /\
    Rgl?.r_inv (hreg hsz) h1 v /\
    V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
    V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
    mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
    RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
    RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
    // correctness
    (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
    S.equal (RV.as_seq h1 hs)
            (MTH.hashess_insert
              (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
              (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
    S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
            (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                    (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic fact for the even-index insertion branch of `insert_`:
// inserting at an even `j` does not change the parent index `j / 2`.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic facts for the odd-index insertion branch of `insert_`:
// the parent index advances by one, stays in range for level `lv + 1`, and
// the current level is non-empty (needed to read its last element).
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Rearrangement of four location unions: (a+b)+(c+d) == (a+c)+(b+d).
// Used to reshuffle `modifies` footprints in `insert_modifies_rec_helper`.
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Footprint algebra for the recursive case of `insert_`: the union of the
// level-`lv` footprint and the recursive (levels above `lv`) footprint equals
// the single footprint stated in `insert_`'s postcondition.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the full-range footprints at `lv + 1`...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// `modifies` weakening: a modification of `l1` is also a modification of any
// union containing `l1`. Used by the base case of `insert_`.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// Sequence fact: after a snoc, the element at the old last position is the
// original sequence's last element.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The rvector invariant implies per-element region validity on any sub-range.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION        AFTER INSERTION
// lv
// 0   h0 h1 h2    ====>   h0 h1 h2 h3
// 1   h01                 h01 h23
// 2                       h03
//
// Note that `acc` is used as a mutable accumulator: on the odd branch it is
// overwritten with hash(previous sibling, acc) before recursing upward.
// Correctness is stated against the high-level `MTH.insert_`.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
  (requires (fun h0 ->
    RV.rv_inv h0 hs /\
    Rgl?.r_inv (hreg hsz) h0 acc /\
    HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
    mt_safe_elts h0 lv hs (Ghost.reveal i) j))
  (ensures (fun h0 _ h1 ->
    // memory safety
    modifies (loc_union
               (loc_union
                 (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                 (V.loc_vector_within hs lv (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             h0 h1 /\
    RV.rv_inv h1 hs /\
    Rgl?.r_inv (hreg hsz) h1 acc /\
    mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
    // correctness
    (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
    S.equal (RV.as_seq h1 hs)
            (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
              (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // Push `acc` onto level `lv` (steps 1 and 2 happen inside this helper).
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  // Odd `j`: the new element completes a pair, so compress and recurse upward.
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))

  // Even `j`: insertion at this level is the whole operation (base case).
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Run-time precondition for insertion: the tree is not full and bumping `j`
// by one does not overflow the 64-bit offset.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)

// Public (const-pointer) wrapper around `mt_insert_pre_nst`.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // Trivial-looking assertion; presumably here to help the checker unify the
  // implicit hash size of `v` with the tree's -- TODO confirm.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
  (requires (fun h0 ->
    let dmt = B.get h0 mt 0 in
    mt_safe h0 mt /\
    Rgl?.r_inv (hreg hsz) h0 v /\
    HH.disjoint (B.frameOf mt) (B.frameOf v) /\
    MT?.hash_size dmt = Ghost.reveal hsz /\
    mt_insert_pre_nst dmt v))
  (ensures (fun h0 _ h1 ->
    // memory safety
    modifies (loc_union
               (mt_loc mt)
               (B.loc_all_regions_from false (B.frameOf v)))
             h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
    mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
// Runs the recursive `insert_` from level 0, re-establishes the invariant of
// the untouched components (`rhs`, `mroot`), then writes back the record with
// `j` incremented and `rhs_ok` cleared.
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` are disjoint from the insertion footprint, so their
  // invariants and representations survive.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the record only touches the `mt` buffer; everything else persists.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool | [] | MerkleTree.Low.mt_get_root_pre_nst | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mtv: MerkleTree.Low.merkle_tree -> rt: MerkleTree.Low.Datastructures.hash -> Prims.bool | {
"end_col": 37,
"end_line": 1514,
"start_col": 33,
"start_line": 1514
} |
FStar.Pervasives.Lemma | val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs | val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs = | false | null | true | if lv = merkle_tree_size_lg then () else mt_safe_elts_init #hsz h (lv + 1ul) hs | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"Prims.bool",
"MerkleTree.Low.mt_safe_elts_init",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"Prims.unit"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv)) | [
"recursion"
] | MerkleTree.Low.mt_safe_elts_init | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg}
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_ h
hs
lv
(LowStar.Vector.size_of hs)
(fun hv -> LowStar.Vector.size_of hv = 0ul))
(ensures MerkleTree.Low.mt_safe_elts h lv hs 0ul 0ul)
(decreases 32 - FStar.UInt32.v lv) | {
"end_col": 45,
"end_line": 182,
"start_col": 2,
"start_line": 181
} |
Prims.GTot | val mt_safe: HS.mem -> mt_p -> GTot Type0 | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv))) | val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt = | false | null | false | B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
RV.rv_inv h (MT?.hs mtv) /\ RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_p",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.RVector.rv_inv",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__rhs",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.__proj__MT__item__mroot",
"MerkleTree.Low.mt_safe_elts",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.__proj__MT__item__j",
"Prims.b2t",
"FStar.Monotonic.HyperHeap.extends",
"LowStar.Vector.frameOf",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"FStar.Monotonic.HyperHeap.disjoint",
"LowStar.Monotonic.Buffer.get"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0 | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_safe: HS.mem -> mt_p -> GTot Type0 | [] | MerkleTree.Low.mt_safe | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | h: FStar.Monotonic.HyperStack.mem -> mt: MerkleTree.Low.mt_p -> Prims.GTot Type0 | {
"end_col": 68,
"end_line": 227,
"start_col": 2,
"start_line": 214
} |
Prims.Tot | val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv) | val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r = | false | null | false | offsets_connect (MT?.offset mtv) r &&
([@@ inline_let ]let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"total"
] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.offset_t",
"Prims.op_AmpAmp",
"MerkleTree.Low.offsets_connect",
"MerkleTree.Low.__proj__MT__item__offset",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Less",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"Prims.bool"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Allocates an empty Merkle tree in fresh sub-regions of `r`: one region for
// the level vectors `hs`, one for the rightmost hashes `rhs`, and one for the
// Merkle root. The result matches `MTH.create_empty_mt` at the spec level.
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` must not disturb `hs`: frame its invariants.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  // Region allocation modifies nothing, hence `loc_none` frames below.
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)
// Frees a Merkle tree: the level vectors, rightmost-hash vector, root hash,
// and finally the tree pointer itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
private
// Sequence lemma: updating index `i` of `as_seq h rv` with `v` is the same
// as concatenating the prefix [0, i), the new element `v`, and the suffix
// [i+1, size). Used to reason about `RV.assign` at the spec level.
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
// Copies hash `v` and pushes it onto the level-`lv` vector of `hs`.
// Postconditions track the precise modification footprint (the old level
// vector plus the slot `hs[lv]` itself), the preservation of all other
// levels, and spec-level equality with `MTH.hashess_insert`.
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));

  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic helper: for an even `j`, inserting one element does not change
// the parent index, i.e. `j / 2 == (j + 1) / 2`.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic helper: for an odd `j`, inserting one element bumps the parent
// index (`(j + 1) / 2 == j / 2 + 1`), the parent index stays within bounds
// of the next level, and the level is non-empty (`j - offset_of i > 0`).
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Rearranges a four-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d),
// proved by chaining binary associativity/commutativity steps.
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Footprint algebra for the recursion in `insert_`: the union of the
// current-level footprint (element `lv` plus slot `hs[lv]`, plus `aloc`)
// with the recursive-call footprint (elements/slots from `lv+1` onward,
// plus `aloc`) collapses to the footprint of all levels from `lv` (plus
// `aloc`). `aloc` is instantiated with the accumulator's regions.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening lemma: a `modifies l1` fact can be widened to
// `modifies ((l1 ∪ l2) ∪ l3)` via `loc_includes`.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// Sequence helper: after snoc-ing `v`, the element at the original last
// position is still the original `S.last s`.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The full rvector invariant implies the element-region invariant for any
// sub-range [i, j); discharged automatically by the SMT solver.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Core insertion: pushes the accumulator `acc` at level `lv` and, when `j`
// is odd (the level becomes full in pairs), compresses the last two hashes
// into `acc` and recurses at level `lv + 1` with halved indices. Matches
// `MTH.insert_` at the spec level; `acc` is mutated in place.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
              ((Ghost.reveal hash_spec)
                (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                        (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Pure precondition for insertion: the tree is not full and the 64-bit
// offset plus the incremented index does not overflow.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Public insertion: runs `insert_` from level 0 and then writes back the
// tree record with `j` incremented and `rhs_ok` cleared. Note that `v` is
// consumed as the accumulator and is therefore mutated.
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  // `rhs` and `mroot` are disjoint from the insertion footprint: frame them.
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree record only touches the pointer cell; everything else
  // is framed by `B.loc_buffer mt`.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree with a user-supplied hash function and one initial
// leaf `init`: allocate an empty tree, then insert `init`.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: the hash size it was built for, plus the vector of sibling
// hashes from leaf to root. `noeq` since vectors have no decidable equality.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in a path pointer at memory `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
// Path invariant: the pointer and its vector are live and freeable, every
// stored hash is valid and lives inside the tree region `mtr`, the vector's
// region extends the pointer's frame, and `mtr` is disjoint from the path's
// own frame. (Regionality cannot be used here because path elements come
// from different regions of `MT?.hs`.)
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// Footprint of a path: all regions descending from the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence [i, j) of low-level hashes to a high-level path
// by taking each hash's representation; built back-to-front by recursion
// on `j`.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Representation of a whole path: lift all hashes currently stored in it.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Index lemma for `lift_path_`: the k-th lifted element is the
// representation of the k-th low-level hash. Registered as an SMT pattern
// so it fires automatically on lifted-path indexing.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Whole-path version of the index lemma: element `i` of the lifted path is
// the representation of the i-th hash stored in the path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                  Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence lemma: two hash sequences that agree (as slices) on [i, j)
// lift to equal high-level paths on that range. Proved by exposing the
// pointwise indexing facts to the SMT solver.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Frame lemma (sequence form): hash validity and region inclusion for
// elements [i, j) survive any modification disjoint from the tree region
// `mtr`. Recursion peels elements from the back.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma: `path_safe` survives a modification disjoint from both the
// path's footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Frame lemma for an empty path: with no stored hashes, only disjointness
// from the path's footprint is needed (no tree-region condition).
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Representation frame lemma (sequence form): the lifted path over [i, j)
// is unchanged by a modification disjoint from the tree region.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Representation frame lemma: the hash size and the lifted path of `p` are
// both unchanged by a modification disjoint from the path footprint and
// the tree region.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocate a fresh, empty path in region `r` (which must be disjoint from the
// tree region `mtr`). The path's hash vector is allocated in a new subregion
// of `r`; the result lifts to the empty spec-level path.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to empty by clearing its hash vector (size becomes 0, hash
// size is kept). Note V.clear drops the elements without freeing them; the
// hashes are pointers into the tree, which owns them.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocate the path: free its hash vector and then the path cell itself.
// Only the path's own storage is freed — the hashes it referenced belong to
// the tree and remain live.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path

// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
//
// Recursion over levels: at each level `lv` with element range [i, j),
// - if j = 0 the tree is exhausted and `acc` holds the root;
// - if j is even, no rightmost element exists at this level; recurse up;
// - if j is odd, the rightmost element hs[lv][j-1-ofs] must be folded into
//   `acc` (and, when `actd`, the previous accumulator is first saved into
//   rhs[lv]); then recurse up with `actd = true`.
// The postcondition ties the result to the high-level spec
// `MTH.construct_rhs` via the `lift`/`r_repr` coercions.
private
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
     HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
     mt_safe_elts #hsz h0 lv hs i j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.loc_rvector rhs)
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 rhs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs i j;
     MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
       (U32.v lv)
       (Rgl?.r_repr (hvvreg hsz) h0 hs)
       (Rgl?.r_repr (hvreg hsz) h0 rhs)
       (U32.v i) (U32.v j)
       (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
     (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
     )))
   (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  // Base case: empty range — nothing to do; align with the spec base case.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    (if j % 2ul = 0ul
    then begin
      // Even case: no rightmost element at this level; recurse to lv+1.
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    else begin
      // Odd case: fold the rightmost element hs[lv][j-1-ofs] into acc.
      if actd
      then begin
        // The accumulator already holds a live value: save it into rhs[lv],
        // then hash the rightmost element with it.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                       (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                              (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // No active accumulator yet: just copy the rightmost element into acc.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      // State after the odd-case update, before the recursive call.
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                     (if actd
                     then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                (Rgl?.r_repr (hreg hsz) hh0 acc)
                     else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      mt_safe_elts_rec hh3 lv hs i j;
      // Recurse one level up with an active accumulator.
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
// Runtime precondition check for `mt_get_root`: trivially true (there is no
// runtime-checkable requirement), kept so the API mirrors the other
// `*_pre_nst` checks and extracts uniformly.
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper over `mt_get_root_pre_nst`: dereferences the (const) tree
// pointer and runs the (trivial) runtime precondition check.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: the cached root is valid; just copy it out.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild the rightmost hashes and the root from scratch.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root in the tree.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark the cached rhs/root as valid (rhs_ok = true).
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
// Append one hash pointer `hp` (owned by the tree, in a region under `mtr`)
// to the path's hash vector. Spec-level effect: `MTH.path_insert`, i.e. a
// snoc on the lifted path. Only the path's footprint is modified.
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
      (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
       hsz = hsz0 /\ hsz = hsz1 /\
       (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
       S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // Insert into the vector (may reallocate it), then show the old elements
  // and `hp` survive both the insertion and the path-cell update.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                 (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                   0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Number of path hashes contributed by one tree level: 0 or 1, mirroring the
// spec function `MTH.mt_path_length_step` (enforced by the refinement above).
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
// Total Merkle path length for index `k` among `j` elements, starting at
// level `lv`: sum of per-level steps, recursing with halved indices. The
// refinement ties the result to the spec `MTH.mt_path_length` and bounds it
// by the number of remaining levels (32 - lv).
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Return the number of hashes currently stored in the (const) path.
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
// Add at most one hash to the path for level `lv`: the sibling of index `k`
// within [i, j). If `k` is odd the left sibling comes from `hs[lv]`; if `k`
// is even the right sibling comes from `hs[lv]` or, at the rightmost
// boundary with `actd`, from `rhs[lv]`. Mirrors `MTH.mt_make_path_step`.
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
     V.size_of (phashes h1 p) <= lv + 2ul /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
        (MTH.mt_make_path_step
          (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
          (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // Odd k: insert the left sibling hs[lv][k-1-ofs].
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // Even k: insert the right sibling if one exists at this level.
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
// Runtime precondition for `mt_get_path_step`: the requested index must be
// within the path's current number of hashes.
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper: dereference the const path pointer and run the bounds
// check `mt_get_path_step_pre_nst`.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Return the i-th hash pointer stored in the path (no copy; the returned
// hash is shared with the tree's storage).
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursive worker for `mt_get_path`: at each level, add the sibling via
// `mt_make_path_step`, then recurse one level up with halved indices
// (activating `actd` when `j` is odd). Spec-level effect: the lifted path
// equals `MTH.mt_get_path_` applied to the initial lifted state.
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) ==
     V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
        (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
          (U32.v i) (U32.v j) (U32.v k) before actd)))))
   (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                   (MTH.mt_make_path_step
                     (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                     (U32.v i) (U32.v j) (U32.v k)
                     (lift_path hh0 mtr p) actd));
    // Tree structures are untouched by the path update (disjoint footprints).
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                   (MTH.mt_get_path_ (U32.v lv + 1)
                     (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                     (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                     (lift_path hh1 mtr p)
                     (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                   (MTH.mt_get_path_ (U32.v lv)
                     (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                     (U32.v i) (U32.v j) (U32.v k)
                     (lift_path hh0 mtr p) actd)))
#pop-options
// Runtime precondition for `mt_get_path`: the external offset `idx` must be
// representable relative to the tree's offset, the path's hash size must
// match the tree's, the split index must fall in [i, j), and the output path
// must start empty.
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper: dereference the const tree and path pointers and run the
// runtime check `mt_get_path_pre_nst`.
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
// Trivial loc-algebra fact ((l1 ∪ l2) ∪ l2 = l1 ∪ l2), stated as a lemma so
// `mt_get_path` can invoke it by name inside its proof.
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
//
// Steps: (1) refresh the root/rhs via `mt_get_root`; (2) insert the leaf
// hash for `idx`; (3) run the level-by-level worker `mt_get_path_`.
// Returns `j` (the element count), which callers pass to `mt_verify`.
#push-options "--z3rlimit 60"
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = Ghost.reveal hsz /\
     Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
     mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
     mt_safe h0 mt /\
     path_safe h0 (B.frameOf mt) p /\
     Rgl?.r_inv (hreg hsz) h0 root /\
     HH.disjoint (B.frameOf root) (B.frameOf mt) /\
     HH.disjoint (B.frameOf root) (B.frameOf p)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     let idx = split_offset (MT?.offset mtv0) idx in
     MT?.hash_size mtv0 = Ghost.reveal hsz /\
     MT?.hash_size mtv1 = Ghost.reveal hsz /\
     Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
     Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
     // memory safety
     modifies (loc_union
                (loc_union
                  (mt_loc mt)
                  (B.loc_all_regions_from false (B.frameOf root)))
                (path_loc p))
              h0 h1 /\
     mt_safe h1 mt /\
     path_safe h1 (B.frameOf mt) p /\
     Rgl?.r_inv (hreg hsz) h1 root /\
     V.size_of (phashes h1 p) ==
     1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
     // correctness
     (let sj, sp, srt =
       MTH.mt_get_path
         (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
     sj == U32.v (MT?.j mtv1) /\
     S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
     srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // Step 1: ensure rhs/root are up to date (also fills `root`).
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  // Step 2: the path starts with the leaf hash itself.
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                 (MTH.path_insert
                   (lift_path hh1 mtframe p)
                   (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Step 3: add one sibling per level.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                    (loc_union
                      (mt_loc ncmt)
                      (B.loc_all_regions_from false (B.frameOf root)))
                    (path_loc p))
                  hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                 (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                   (U32.v i) (U32.v j) (U32.v idx)
                   (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing

// Loc-algebra helper for the recursive `modifies` clause of `mt_flush_to_`:
// combining level lv's element/slot footprints with the footprints of levels
// [lv+1, size) yields exactly the footprint of levels [lv, size). Proved by
// unfolding one step of each side and reassociating the four unions.
private val
mt_flush_to_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (loc_union
            (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
        loc_union
          (RV.rv_loc_elems h hs lv (V.size_of hs))
          (V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool | false | true | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool | [] | MerkleTree.Low.mt_retract_to_pre_nst | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mtv: MerkleTree.Low.merkle_tree -> r: MerkleTree.Low.offset_t -> Prims.bool | {
"end_col": 35,
"end_line": 2749,
"start_col": 2,
"start_line": 2747
} |
FStar.HyperStack.ST.ST | val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after))))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv)) | val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
let mt_path_insert #hsz mtr p hp = | true | null | false | let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_ mtr
(V.as_seq hh0 pv)
0
(S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv))
hh0
hh1;
path_preserved_ mtr
(V.as_seq hh0 pv)
0
(S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv))
hh0
hh1;
Rgl?.r_sep (hreg hsz) hp (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_ mtr
(V.as_seq hh1 ipv)
0
(S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p))
hh1
hh2;
path_preserved_ mtr
(V.as_seq hh1 ipv)
0
(S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p))
hh1
hh2;
Rgl?.r_sep (hreg hsz) hp (B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp) 0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.lift_path_eq",
"MerkleTree.Low.__proj__Path__item__hash_size",
"FStar.Seq.Properties.snoc",
"LowStar.Vector.as_seq",
"FStar.Seq.Base.length",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"MerkleTree.New.High.hash",
"FStar.UInt32.v",
"MerkleTree.Low.lift_path",
"MerkleTree.Low.lift_path_",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hreg",
"LowStar.Monotonic.Buffer.loc_region_only",
"LowStar.Monotonic.Buffer.frameOf",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.path_preserved_",
"MerkleTree.Low.path_safe_preserved_",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.Path",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Vector.frameOf",
"LowStar.Vector.vector",
"LowStar.Vector.insert",
"MerkleTree.Low.__proj__Path__item__hashes",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination rule (tail): from safety at level `lv`, extract safety at
// level `lv + 1` with the halved index range.
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// An all-empty hash vector (every level has size 0) trivially satisfies
// `mt_safe_elts` for the empty range [0, 0). Used right after allocation
// in `create_empty_mt`.
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                    (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
// Structural induction on the remaining levels.
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` is preserved across any heap modification
// whose footprint `p` is disjoint from the vector structure of `hs`.
// Registered as an SMT pattern so the solver applies it automatically.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Induction on levels; `V.get_preserved` shows the level-`lv` slot is
// unchanged, then recurse for the levels above.
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
// The full tree invariant: the tree pointer is live and freeable, all three
// sub-structures (`hs`, `rhs`, `mroot`) satisfy their own invariants, element
// access is safe, and the sub-structures live in pairwise-disjoint regions
// that all extend the tree's own region.
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: each sub-structure's region extends the tree's region ...
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  // ... and the three regions are pairwise disjoint.
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
// The abstract footprint of a tree: every region under the tree pointer's
// own region. Valid because `mt_safe` forces all sub-structures into
// regions extending `B.frameOf mt`.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the full invariant: `mt_safe` survives any modification
// disjoint from `mt_loc mt`, and the tree value itself is unchanged.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
// The asserts establish that each sub-structure's footprint is included in
// `mt_loc mt`, so `p` is disjoint from it; then the per-structure framing
// lemmas apply.
let mt_safe_preserved mt p h0 h1 =
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
// Bridges the low-level safety predicate to the high-level (spec)
// well-formedness predicate `MTH.hs_wf_elts` over the lifted sequence.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Induction on levels, mirroring the definition of `mt_safe_elts`.
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Lifts a low-level tree value to its high-level specification counterpart,
// field by field. The refinement on `mtv` supplies exactly the invariants
// needed to take representations of the sub-structures.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to establish the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Pointer-level wrapper around `merkle_tree_lift`: dereference, then lift.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Representation-preservation: under the same framing hypotheses as
// `mt_safe_preserved`, the *lifted* (high-level) tree is unchanged too.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
// Each assert shows one sub-structure's footprint lies inside `mt_loc mt`;
// the `as_seq_preserved` / `modifies_buffer_elim` calls then show each
// lifted field is unchanged.
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Allocates an empty tree (i = j = 0, no leaves) in fresh sub-regions of
// `r`: one region each for `hs`, `rhs`, and `mroot`, establishing the
// disjointness demanded by `mt_safe`. Callers use `mt_create` below, which
// additionally inserts an initial hash.
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Allocate the level-indexed hash vectors in their own sub-region.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Allocate the cached right-hand-side hashes in a second sub-region.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // `rhs` allocation does not disturb `hs`.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Allocate the Merkle root buffer in a third sub-region.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Finally allocate the tree record itself directly in `r`.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)
// Frees the tree: both hash rvectors, the root buffer, and finally the
// tree record itself. Requires the full invariant so each free is valid.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
private
// Sequence algebra lemma: updating index `i` of the lifted rvector is the
// same as splicing `prefix ++ [v] ++ suffix` from the sub-sequence views.
// Used by `hash_vv_insert_copy` to relate `RV.assign` to a functional update.
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slices on either side of index `i` are unaffected by the update.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Relate those slices to the `as_seq_sub` views.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
// Pushes a copy of hash `v` onto the level-`lv` vector hs[lv]. Only the
// level-`lv` element and the vector slot itself are modified; everything
// else (including `v`, which is copied, not captured) is preserved.
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic fact for the even-index case of `insert_`: adding one leaf to
// an even `j` does not change the parent index (`j / 2 == (j + 1) / 2`).
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic facts for the odd-index case of `insert_`: the parent index
// increments (`(j + 1) / 2 == j / 2 + 1`), the parent still fits its level
// bound, and the current level is non-empty (`j - offset_of i > 0`).
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Regroups a 4-way location union: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Pure bookkeeping for the `modifies` reshuffles in `insert_`.
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Collapses the union of the footprints touched at level `lv` (element +
// vector slot) and by the recursive call (levels lv+1 ..) into the single
// footprint claimed by `insert_`'s `modifies` postcondition.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both ranged footprints at lv / lv+1 ...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening: `modifies l1` implies `modifies ((l1 ∪ l2) ∪ l3)`. Used in the
// even branch of `insert_`, which touches only the level-`lv` footprint but
// must satisfy the larger `modifies` clause of the spec.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// After a snoc, the element at the original last position is still the
// last element of the original sequence.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The rvector invariant already entails the regionality of any element
// sub-range; this lemma just exposes that fact for a chosen [i, j).
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Recursive worker for insertion (see the diagram above): pushes `acc`
// onto level `lv`; if `j` was odd the two hashes at the end of the level
// are compressed into `acc` and insertion recurses one level up. `acc` is
// mutated in place as the running accumulator.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  // Odd `j`: compress the last two hashes of this level into `acc` and
  // recurse one level up; even `j`: this level is done.
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                      ((Ghost.reveal hash_spec)
                        (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                        (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                        (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Precondition check for insertion: the tree is not full AND bumping the
// leaf counter does not overflow the 64-bit global offset.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Public runtime precondition check: dereferences the (const) tree pointer
// and delegates to `mt_insert_pre_nst`.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
// Runs the recursive worker starting at level 0, then writes back the tree
// record with `j` incremented and `rhs_ok` cleared. The framing lemmas in
// between show `rhs` and `mroot` survive the worker's modifications, and
// that the final record write disturbs nothing else.
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree with a caller-supplied hash function and a first
// leaf `init`: allocate an empty tree, then insert `init`. Note `init`
// serves as the insertion accumulator, so its contents are clobbered.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: a hash size together with a vector of hash pointers.
// Note the hash pointers alias into the tree's own storage (different
// regions per level), so paths cannot reuse the `regional` machinery.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path

private
// Ghost accessor: the hash vector stored in path pointer `p` at state `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
// Path invariant: pointer and vector are live/freeable; every stored hash
// is valid and lives inside the tree region `mtr`; and the path's own
// storage is disjoint from `mtr` (so tree updates cannot corrupt the
// path structure, only the hashes it points into).
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Footprint of a path: every region under the path pointer's region.
// Excludes the pointed-to hashes, which live in the tree region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the slice hs[i..j) of hash pointers to a high-level path (a
// sequence of hash values), by recursion on `j`.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts an entire low-level path to its high-level representation.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Indexing the lifted sequence agrees with lifting the indexed element.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Lifting is extensional: equal slices of hashes lift to equal sequences.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  // The asserts restate indexing facts in the forms the SMT solver needs
  // to connect `lift_path_` on both sequences via the equal slices.
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Element-wise invariant preservation: modifying a location disjoint from
// the tree region `mtr` keeps every stored hash valid and in `mtr`.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` survives any modification disjoint from both the path's
// footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: no element invariants to re-establish,
// so disjointness from the path's footprint alone suffices.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// The lifted representation of hs[i..j) is unchanged by modifications
// disjoint from the tree region.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level corollary: the lifted path (and its hash size) is unchanged
// by modifications disjoint from both the path and the tree region.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in a new sub-region of `r`; the region is
// disjoint from the tree region `mtr` by the precondition.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Empties the path in place; the hash size and allocations are kept.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Releases both the path's hash vector and the path pointer itself.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Builds the "rightmost hashes" of an incomplete tree from level `lv`
// upward, accumulating the Merkle root in `acc`. `actd` records whether
// `acc` already holds a partial result. Matches MTH.construct_rhs exactly.
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
      HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
      mt_safe_elts #hsz h0 lv hs i j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.loc_rvector rhs)
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 rhs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs i j;
      MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
        (U32.v lv)
        (Rgl?.r_repr (hvvreg hsz) h0 hs)
        (Rgl?.r_repr (hvreg hsz) h0 rhs)
        (U32.v i) (U32.v j)
        (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
      (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
      )))
    (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  // Base case: nothing remains at this level; the spec call is a no-op.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    // Even case: no rightmost element at this level; recurse unchanged.
    (if j % 2ul = 0ul
    then begin
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    // Odd case: fold the rightmost hash of this level into rhs/acc.
    else begin
      if actd
      then begin
        // `acc` already holds a partial root: store it at rhs[lv], then
        // combine it with the rightmost hash of this level into `acc`.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                        (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                               (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // `acc` is not yet active: seed it with the rightmost hash.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                      (if actd
                      then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                 (Rgl?.r_repr (hreg hsz) hh0 acc)
                      else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      // Recurse to the next level with the accumulator now active.
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
// No runtime precondition is currently needed for getting the root.
let mt_get_root_pre_nst mtv rt = true
// Public runtime check for the `mt_get_root` precondition.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Writes the Merkle root into `rt`. If the cached rightmost hashes are
// up to date, only a copy is performed; otherwise `construct_rhs` runs
// and the resulting root is cached (rhs_ok is set to true).
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      mt_get_root_pre_nst dmt rt /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf rt)))
               h0 h1 /\
      mt_safe h1 mt /\
      (let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
      MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
      MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
      MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
      MT?.offset mtv1 == MT?.offset mtv0 /\
      MT?.rhs_ok mtv1 = true /\
      Rgl?.r_inv (hreg hsz) h1 rt /\
      // correctness
      MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
      (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: rightmost hashes are up to date — just copy the cached root.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild the rightmost hashes and root, then cache them.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root in the tree's `mroot` field.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark the cached rightmost hashes as valid (rhs_ok := true).
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
inline_for_extraction
// Appends one hash `hp` (owned by the tree region) to the path `p`,
// matching MTH.path_insert on the lifted representations.
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
      hsz = hsz0 /\ hsz = hsz1 /\
      (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
      S.equal hspec after)))))
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after))))) | [] | MerkleTree.Low.mt_path_insert | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
hp: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 34,
"end_line": 1739,
"start_col": 34,
"start_line": 1711
} |
Prims.GTot | val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r}) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0) | val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt = | false | null | false | merkle_tree_lift h (B.get h mt 0) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_p",
"MerkleTree.Low.mt_safe",
"MerkleTree.Low.merkle_tree_lift",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.New.High.merkle_tree",
"FStar.UInt32.v",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.New.High.mt_wf_elts"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
// In-tree indices are 32-bit; global positions are 64-bit offsets.
type index_t = uint32_t

let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
// A tree can span at most 2^32 - 1 consecutive positions from its offset.
let offset_range_limit = uint32_max

type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32

private inline_for_extraction
// True when `y` is reachable from base offset `x` within a 32-bit range.
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit

private inline_for_extraction
// Converts a global 64-bit position into a 32-bit in-tree index.
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
  [@inline_let] let diff = U64.sub_mod index tree in
  assert (diff <= offset_range_limit);
  Int.Cast.uint64_to_uint32 diff

private inline_for_extraction
// True when `x + i` does not overflow 64 bits.
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)

private inline_for_extraction
// Converts an in-tree index back into a global 64-bit position.
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
  U64.add tree (u32_64 i)
// Number of levels in the hash store: log2 of the maximum leaf count.
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
// See the comment above for the meaning of the fields: `hs` holds all
// level hashes (hs[0] = leaves), `rhs`/`mroot` cache the rightmost hashes
// and the root, valid only when `rhs_ok` is set.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree

type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
// Runtime check of the structural well-formedness of the MT fields.
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
  j >= i && add64_fits offset j &&
  V.size_of hs = merkle_tree_size_lg &&
  V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
// The tree can accept another leaf while j is below the 32-bit capacity.
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max

val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
// Rounds an index down to the nearest even number (start of its pair).
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  GTot Type0 (decreases (32 - U32.v lv))
// At each level the vector length matches the live range [offset_of i, j);
// the invariant recurses with halved indices up the tree.
let rec mt_safe_elts #hsz h lv hs i j =
  if lv = merkle_tree_size_lg then true
  else (let ofs = offset_of i in
       V.size_of (V.get h hs lv) == j - ofs /\
       mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
// Fuel 1 lets the SMT encoding unfold the recursive `mt_safe_elts` exactly
// once, which is all the (un)folding lemmas below need.
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction lemma: folds one step of the `mt_safe_elts` recursion from
// its two conjuncts (head length fact + recursive tail).
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination lemma (head conjunct): safety at level `lv` gives the exact
// length of the level-`lv` vector.
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination lemma (recursive tail): safety at level `lv` implies safety at
// level `lv + 1` for the halved (parent) index range.
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// If every level vector from `lv` upward is empty, the empty range [0, 0)
// is safe at every level.  Used right after allocating a fresh tree.
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                  (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  // Induction over the levels, mirroring the recursion of `mt_safe_elts`.
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` only constrains the lengths of the level
// vectors, so it is preserved by any heap modification disjoint from the
// vector structure of `hs`.  The SMTPat triggers let Z3 apply it
// automatically wherever the hypotheses are in context.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  // Per level: the slot itself is untouched, then recurse on the parents.
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: the three sub-structures live in pairwise-disjoint child
  // regions of the tree pointer's region, so `mt_loc` below covers them all.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
// I.e. the tree's footprint is every region under the pointer's frame,
// which (by `mt_safe`) includes `hs`, `rhs`, and `mroot`.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing: a heap modification disjoint from the tree's footprint preserves
// both the stored record (same `B.get`) and the whole `mt_safe` invariant.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // Each sub-structure's footprint is included in `mt_loc mt`, hence
  // disjoint from `p`; then per-structure framing lemmas close the proof.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
                       (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// The low-level safety predicate (exact vector lengths per level) implies
// the high-level well-formedness `MTH.hs_wf_elts` of the lifted sequences.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  // Induction over the levels, in lock-step with `mt_safe_elts`.
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghostly lifts a low-level tree record — under its liveness/safety
// hypotheses — to the high-level specification tree `MTH.merkle_tree`,
// field by field: machine integers become nats and regional vectors become
// pure sequences.  `mt_safe_elts_spec` discharges the `mt_wf_elts`
// refinement on the result.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r}) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r}) | [] | MerkleTree.Low.mt_lift | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | h: FStar.Monotonic.HyperStack.mem -> mt: MerkleTree.Low.mt_p{MerkleTree.Low.mt_safe h mt}
-> Prims.GTot (r: MerkleTree.New.High.merkle_tree{MerkleTree.New.High.mt_wf_elts r}) | {
"end_col": 35,
"end_line": 299,
"start_col": 2,
"start_line": 299
} |
FStar.HyperStack.ST.ST | val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_retract_to mt r =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let r = split_offset offset r in
let hs = MT?.hs mtv in
mt_retract_to_ hs 0ul (MT?.i mtv) (r + 1ul) (MT?.j mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv) (MT?.offset mtv) (MT?.i mtv) (r+1ul) hs false (MT?.rhs mtv) (MT?.mroot mtv) (MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs (MT?.i mtv) (r+1ul) (B.loc_buffer mt) hh1 hh2 | val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt)))
let mt_retract_to mt r = | true | null | false | let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let r = split_offset offset r in
let hs = MT?.hs mtv in
mt_retract_to_ hs 0ul (MT?.i mtv) (r + 1ul) (MT?.j mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved (MT?.rhs mtv)
(loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs)) (V.loc_vector_within hs 0ul (V.size_of hs))
)
hh0
hh1;
RV.as_seq_preserved (MT?.rhs mtv)
(loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs)) (V.loc_vector_within hs 0ul (V.size_of hs))
)
hh0
hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv))
(MT?.mroot mtv)
(loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs)) (V.loc_vector_within hs 0ul (V.size_of hs))
)
hh0
hh1;
mt *=
MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(r + 1ul)
hs
false
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs (MT?.i mtv) (r + 1ul) (B.loc_buffer mt) hh1 hh2 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.mt_safe_elts_preserved",
"MerkleTree.Low.__proj__MT__item__hash_size",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowStar.Monotonic.Buffer.loc_buffer",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.RVector.as_seq_preserved",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"LowStar.RVector.rv_inv_preserved",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.MT",
"MerkleTree.Low.__proj__MT__item__offset",
"MerkleTree.Low.__proj__MT__item__hash_spec",
"MerkleTree.Low.__proj__MT__item__hash_fun",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rv_loc_elems",
"LowStar.Vector.size_of",
"LowStar.Vector.loc_vector_within",
"LowStar.Vector.loc_vector_within_included",
"LowStar.RVector.rv_loc_elems_included",
"MerkleTree.Low.mt_retract_to_",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.b2t",
"Prims.op_Equality",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
// A const pointer: a single-element const buffer whose underlying storage is
// mutable (MUTABLE qualifier) — i.e. a read-only view of a regular pointer.
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
// Largest 32-bit value (2^32 - 1), as a 32-bit literal.
let uint32_32_max = 4294967295ul
inline_for_extraction
// Same bound, as a 64-bit literal, for comparisons against offsets.
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
// An external 64-bit index may exceed the tree's base offset by at most
// 2^32 - 1, so the difference always fits an internal 32-bit index.
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
// `offsets_connect x y`: external index `y` is reachable from base offset
// `x` within the 32-bit range.
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
// Converts an external 64-bit index to the tree's internal 32-bit index by
// subtracting the base offset; the narrowing cast is safe by the refinement.
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
  [@inline_let] let diff = U64.sub_mod index tree in
  assert (diff <= offset_range_limit);
  Int.Cast.uint64_to_uint32 diff
// `add64_fits x i`: advancing base offset `x` by internal index `i` does
// not overflow 64 bits.
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
// Inverse of `split_offset`: rebuilds the external 64-bit index.
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
  U64.add tree (u32_64 i)
// The tree has a fixed depth of 32 levels, matching 32-bit leaf indices.
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate some merkle paths that need the rightmost hashes
//        as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
//          root of the tree. If `rhs_ok` is true then it has the up-to-date
//          root value.
// Additionally: `offset` translates external 64-bit indices into the
// internal 32-bit range [i, j); `hash_spec` is the (erased) pure
// specification of the hash function and `hash_fun` its Low* implementation.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
// Ghostly lifts a safe tree pointer to the high-level specification tree,
// by dereferencing and delegating to `merkle_tree_lift`.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing for the lifted view: a modification disjoint from the tree's
// footprint leaves `mt_lift` unchanged (on top of preserving `mt_safe`).
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  // The record itself is unchanged...
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // ...and each lifted field is unchanged, by framing the two regional
  // vectors and the root-hash buffer.
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Allocates an empty tree (i = j = 0, offset 0) in region `r`, placing each
// sub-structure in its own fresh child region so the disjointness part of
// `mt_safe` holds by construction.
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Allocate the 32 (empty) level vectors in a fresh child region.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Allocate the rightmost-hash store in its own child region.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Frame `hs` across the `rhs` allocation.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Allocate the Merkle root hash in a third child region.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Finally allocate the tree record itself directly in region `r`.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees the level vectors, the rightmost-hash store, the root hash, and the
// tree record itself; requires (and consumes) the `mt_safe` invariant.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

private
// Sequence-level fact used by insertion: updating index `i` of an
// rvector's representation equals (prefix up to i) ++ [v] ++ (suffix
// from i+1).
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                  (S.append
                    (RV.as_seq_sub h rv 0ul i)
                    (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slices of the updated sequence outside position `i` equal the original
  // slices...
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // ...and those slices coincide with `as_seq_sub` prefixes/suffixes.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
// Pushes a copy of hash `v` onto `hs[lv]` (the vector of hashes at level
// `lv`). Memory-safety postconditions say only `hs[lv]`'s region and the
// `lv`-th vector slot are modified; correctness says the representation of
// `hs` becomes `MTH.hashess_insert` of the old representation.
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic helper: when `j` is even, inserting one element does not move
// the parent index, i.e. `j / 2 = (j + 1) / 2`.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic helper: when `j` is odd, inserting one element bumps the parent
// index (`(j + 1) / 2 = j / 2 + 1`), the parent index stays in range for the
// next level, and the level-`lv` vector is non-empty (`j - offset_of i > 0`).
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Pure `loc` algebra: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Proved by chaining three applications of `loc_union_assoc` (and the
// commutativity implicit in the middle swap of `b` and `c`).
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Rearranges the `modifies` footprint produced by one unfolding of `insert_`:
// (level-`lv` element locs ∪ slot `lv`) ∪ aloc, joined with the recursive
// footprint for levels `lv+1..`, equals the single flattened footprint
// (all element locs from `lv` ∪ all slots from `lv`) ∪ aloc.
// `aloc` abstracts the accumulator's region footprint.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Peel slot `lv` off the vector footprint...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  // ...and element `lv` off the element footprint.
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakens a `modifies l1` fact to `modifies ((l1 ∪ l2) ∪ l3)`, used by the
// even branch of `insert_` to match the recursive postcondition's footprint.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// Sequence helper: the element at the old last position of `snoc s v` is
// the last element of `s` (discharged automatically by SMT).
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The rvector invariant already implies per-element region facts for any
// sub-range [i, j); this lemma just exposes that implication.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Recursively inserts a hash at level `lv` and propagates a compressed
// accumulator `acc` upward: if `j` is odd the level becomes full, so `acc`
// is combined with the level's second-to-last hash and inserted at `lv+1`;
// if `j` is even the recursion stops. Mirrors `MTH.insert_` (the high-level
// spec) step for step; `hash_spec` is the ghost spec of `hash_fun`.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
              ((Ghost.reveal hash_spec)
                (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                        (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and the 64-bit
// global index `offset + j + 1` does not overflow.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // Trivial assertion that helps the checker relate the runtime hash size
  // of the dereferenced tree with the ghost `hsz` refinement on `v`.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Inserts hash `v` into the Merkle tree. NOTE: `v` is consumed as the
// accumulator during insertion, so its contents are clobbered. Correctness
// relates the low-level state to `MTH.mt_insert` on the lifted tree.
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let dmt = B.get h0 mt 0 in
      mt_safe h0 mt /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (B.frameOf mt) (B.frameOf v) /\
      MT?.hash_size dmt = Ghost.reveal hsz /\
      mt_insert_pre_nst dmt v))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf v)))
               h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
      mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Do the actual insertion starting from level 0.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  // `rhs` and `mroot` live in regions disjoint from the insertion footprint,
  // so their invariants and representations are preserved across `insert_`.
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached rightmost hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree record only touches the `mt` pointer; everything else
  // is preserved.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree in a fresh sub-region of `r` with a custom hash
// function, seeded with `init` (a valid tree has at least one element).
// `init` is consumed as an accumulator by `mt_insert`, like `v` there.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
  (requires (fun h0 ->
    Rgl?.r_inv (hreg hsz) h0 init /\
    HH.disjoint r (B.frameOf init)))
  (ensures (fun h0 mt h1 ->
    // memory safety
    modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = hsz /\
    mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  // NOTE(review): `hh0`/`hh2` are not referenced afterwards; they appear to
  // be leftover proof-state snapshots — confirm before removing, since
  // dropping them can perturb SMT proof replay.
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A (Merkle inclusion) path: a vector of hashes of a fixed hash size.
// Element pointers come from the target tree's `MT?.hs`, each in its own
// region, so `path` invariants are defined manually below instead of via
// the regional framework.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable; every hash element is valid and lives inside the
// tree region `mtr`; the vector's region extends the path pointer's frame,
// which is itself disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Footprint of a path: every region reachable from the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence [i, j) of low-level hash pointers `hs` to a
// high-level path (sequence of hash values), reading each element's
// representation in memory `h`. Recursion peels off the last element.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Representation of a whole path: lift every element of its hash vector.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the `k`-th lifted element is
// the representation of the `k`-th pointer. Registered as an SMT pattern so
// index facts about lifted paths fire automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: indexing a lifted path agrees
// with reading the corresponding vector slot's representation.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Two pointer sequences that agree (as slices) on [i, j) lift to equal
// paths. Proved by exposing the pointwise characterization on both sides
// and re-indexing so the SMT solver can match the equal slices.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Element-wise preservation: modifying a location disjoint from the tree
// region `mtr` keeps every hash in `hs[i, j)` valid and inside `mtr`.
// Recursion on `j`, separating the last element with `Rgl?.r_sep`.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` survives any modification disjoint from both the path's
// footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: with no elements there is no element-wise
// obligation, so only disjointness from the path footprint is needed (no
// disjointness from `mtr` is required, unlike `path_safe_preserved`).
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Element-wise representation preservation: under the same disjointness as
// `path_safe_preserved_`, the lifted path over `hs[i, j)` is unchanged.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Whole-path corollary: a modification disjoint from both the path and the
// tree region preserves the path's hash size and its lifted representation.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh empty path: the hash vector lives in a new sub-region
// of `r`, and the path record is malloc'd directly in `r`.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Empties a path in place (resets the vector size to zero; the tree-owned
// hash pointers themselves are not freed — the path does not own them).
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees the path's vector and the path record itself. Only the containers
// are freed — the hashes they pointed into belong to the tree.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Computes the "rightmost hashes" (`rhs`) of an incomplete tree from level
// `lv` upward; `acc` accumulates the Merkle root along the way and `actd`
// records whether `acc` currently holds a meaningful value. Only `rhs` and
// the accumulator's regions are modified; correctness matches
// `MTH.construct_rhs` on the lifted state. (Implementation follows below.)
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
      HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
      mt_safe_elts #hsz h0 lv hs i j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.loc_rvector rhs)
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 rhs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs i j;
      MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
        (U32.v lv)
        (Rgl?.r_repr (hvvreg hsz) h0 hs)
        (Rgl?.r_repr (hvreg hsz) h0 rhs)
        (U32.v i) (U32.v j)
        (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
      (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
      )))
    (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Recursively constructs the rightmost hashes `rhs` from level `lv` upwards,
// folding active rightmost hashes into `acc`.  Mirrors the three cases of
// the high-level spec `MTH.construct_rhs`:
// - `j = 0`: nothing left at this or any higher level;
// - `j` even: no rightmost element at this level — recurse one level up;
// - `j` odd: the last hash at this level (hs.[lv].[j - 1 - ofs]) is a
//   rightmost one.  If an accumulated hash is active (`actd`), save `acc`
//   into rhs.[lv] and hash it with the rightmost element; otherwise copy
//   the rightmost element into `acc`.  Then recurse with `actd = true`.
// The interleaved lemma calls and asserts re-establish memory safety
// (`rv_inv`, `mt_safe_elts`, modifies clauses) and the correspondence with
// `MTH.construct_rhs` after every heap-mutating step.
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: j = 0 — nothing is modified; only re-derive the spec equality.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even case: no rightmost hash at this level; recurse directly one level up.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
// Odd case: hs.[lv].[j - 1 - ofs] is a rightmost hash at this level.
if actd
then begin
// Accumulator active: save it into rhs.[lv], then fold the rightmost
// element into it with `hash_fun`.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// No active accumulator yet: start one by copying the rightmost element.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
// Recurse one level up; from here on the accumulator is active.
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
// Runtime (non-stateful) precondition check for `mt_get_root`.
// There is no dynamic condition to check, so it is the constant `true`;
// it exists to keep the pre/`_nst` pattern uniform with the other API entries.
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper around `mt_get_root_pre_nst`: dereferences the (const)
// tree pointer and runs the dynamic precondition check for `mt_get_root`.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
// Fast path: rightmost hashes are up to date, so the cached root `mroot`
// is valid — just copy it into `rt`.
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
// Slow path: rebuild rightmost hashes, which also computes the root
// into `rt`; then cache it into `mroot` and mark `rhs_ok = true`.
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Write back the tree record with `rhs_ok = true`.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// Appends the hash (pointer) `hp` to the path `p`.  Note that this stores
// a pointer into the Merkle-tree region `mtr` (which must include `hp`'s
// frame), not a copy of the hash.  Corresponds to `MTH.path_insert`.
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// Insert `hp` at the end of the path's hash vector (may reallocate).
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
// Write the (possibly new) vector back into the path record.
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Number of path hashes contributed by a single tree level: 0 or 1.
// A sibling hash is omitted only when `k` has no sibling at this level
// (k = j, or k + 1 = j with no active rightmost hash).
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
// Total path length: sum of per-level steps while halving (k, j) up the
// tree.  `actd` becomes (and stays) true once a level has an odd `j`,
// i.e. once a rightmost hash exists.
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the number of hashes currently stored in the path `p`.
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
// Adds (at most) one sibling hash for level `lv` to the path `p`, matching
// `MTH.mt_make_path_step`:
// - `k` odd: the left sibling hs.[lv].[k - 1 - ofs];
// - `k` even: the right sibling hs.[lv].[k + 1 - ofs] when it exists,
//   or rhs.[lv] when `k + 1 = j` and a rightmost hash is active (`actd`),
//   or nothing when `k = j` (or `k + 1 = j` without an active rightmost).
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
// `k` odd: insert the left sibling.
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
// `k` even: sibling is on the right, or comes from rhs, or is absent.
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
// Non-stateful precondition for `mt_get_path_step`: the requested index
// must be within the path's hash vector.
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
// Stateful wrapper: dereferences the (const) path pointer and runs the
// bounds check of `mt_get_path_step_pre_nst`.
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the `i`-th hash (pointer) stored in the path `p`.
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursive worker for `mt_get_path`: for each level from `lv` up (until
// `j = 0`), appends this level's sibling hash via `mt_make_path_step` and
// recurses with halved indices.  `actd` flips to true once an odd `j` is
// seen.  Corresponds to `MTH.mt_get_path_`.
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
// The tree is untouched by the step (only the path is modified).
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
// Non-stateful precondition for `mt_get_path`: the external offset must be
// representable relative to the tree's offset, hash sizes must agree, the
// (internal) index must be within [i, j), and the output path must be empty.
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper: dereferences the tree and path pointers and runs
// `mt_get_path_pre_nst`.
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
// Trivial loc algebra fact used by `mt_get_path`: unioning `l2` again is
// idempotent.  Discharged automatically by the SMT solver.
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
// Top-level path construction: (1) refresh the root (and rightmost hashes)
// via `mt_get_root`, (2) seed the path with the leaf hash at `idx`, then
// (3) walk the levels with `mt_get_path_`.  Returns the element count `j`.
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
// Step 1: ensure rhs/root are up to date; writes the root into `root`.
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
// Step 2: the first path element is the leaf hash itself.
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
// Step 3: collect sibling hashes level by level.
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing

// Loc-algebra lemma for `mt_flush_to_`: the union of this level's footprint
// (element `lv` plus the vector slot at `lv`) with the recursive footprint
// (elements and slots from `lv + 1` up) equals the whole footprint from `lv`.
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Reassociate the four-way union into the required shape.
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Recursive worker for `mt_flush_to`: at each level starting from `lv`,
// drops the hashes between `offset_of pi` and `offset_of i` by flushing the
// level's vector in place, reassigns it into `hs`, and recurses with halved
// indices.  Stops when the offsets coincide.  Corresponds to
// `MTH.mt_flush_to_`.  The bulk of the body is proof bookkeeping for the
// modifies clause, `rv_inv`, and `mt_safe_elts` across the two mutations
// (`rv_flush_inplace` and `RV.assign`).
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
// Nothing to drop at this level (and thus none above): done.
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv)
val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_retract_to_pre mt r =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r
#push-options "--z3rlimit 100"
val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\ | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt))) | [] | MerkleTree.Low.mt_retract_to | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.mt_p -> r: MerkleTree.Low.offset_t -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 77,
"end_line": 2810,
"start_col": 24,
"start_line": 2776
} |
FStar.HyperStack.ST.ST | val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2 | val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
let mt_insert hsz mt v = | true | null | false | let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz
#(Ghost.reveal (MT?.hash_spec mtv))
0ul
(Ghost.hide (MT?.i mtv))
(MT?.j mtv)
hs
v
(MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved (MT?.rhs mtv)
(loc_union (loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0
hh1;
RV.as_seq_preserved (MT?.rhs mtv)
(loc_union (loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0
hh1;
Rgl?.r_sep (hreg hsz)
(MT?.mroot mtv)
(loc_union (loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0
hh1;
mt *=
MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt) hh1 hh2 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.mt_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_safe_elts_preserved",
"MerkleTree.Low.__proj__MT__item__hash_size",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__j",
"LowStar.Monotonic.Buffer.loc_buffer",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.RVector.as_seq_preserved",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.RVector.rv_inv_preserved",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.MT",
"MerkleTree.Low.__proj__MT__item__offset",
"MerkleTree.Low.__proj__MT__item__hash_spec",
"MerkleTree.Low.__proj__MT__item__hash_fun",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rv_loc_elems",
"LowStar.Vector.size_of",
"LowStar.Vector.loc_vector_within",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Vector.loc_vector_within_included",
"LowStar.RVector.rv_loc_elems_included",
"MerkleTree.Low.insert_",
"FStar.Ghost.hide",
"MerkleTree.Low.index_t",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.b2t",
"Prims.op_Equality",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.merkle_tree_size_lg",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
// Footprint of a tree: everything rooted at the tree pointer's frame.
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for `mt_safe`: if the modified footprint `p` is disjoint from
// the whole tree footprint `mt_loc mt`, both the tree pointer's contents and
// the invariant survive from `h0` to `h1`.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // `mt_loc` covers each sub-structure (regionality), so each preservation
  // lemma below applies with the same disjointness hypothesis.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// Relates the low-level element-safety predicate to the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted sequence view.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Same recursion scheme as `mt_safe_elts`: halve the indices per level.
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lift of a low-level tree value to the high-level specification tree
// (`MTH.merkle_tree`), assuming the component invariants hold in `h`.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to establish the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Ghost lift of a tree *pointer*: dereference in `h`, then lift the value.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Strengthening of `mt_safe_preserved`: a disjoint modification also leaves
// the *lifted* (specification-level) tree unchanged.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // Each component's abstract view (`as_seq`, root contents) is framed.
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
    (requires (fun _ -> true))
    (ensures (fun h0 mt h1 ->
      let dmt = B.get h1 mt 0 in
      // memory safety
      B.frameOf mt = r /\
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      mt_not_full h1 mt /\
      // correctness
      MT?.hash_size dmt = hash_size /\
      MT?.offset dmt = 0UL /\
      merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Each component gets its own fresh sub-region of `r`, establishing the
  // extends/disjoint regionality clauses of `mt_safe` by construction.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` must not disturb `hs`: re-establish its invariants.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Initial state: offset 0, i = j = 0 (empty), rhs not yet computed.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees every component of the tree (level vectors, rightmost hashes, root)
// and then the tree pointer itself. Requires `mt_safe` (in particular
// `B.freeable mt`).
val mt_free: mt:mt_p ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt))
    (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence lemma: updating index `i` of an rvector's abstract sequence equals
// the concatenation  prefix [0,i) ++ [v] ++ suffix [i+1, size).  Used to
// relate `RV.assign` to high-level sequence updates.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slicing an updated sequence outside the updated index is a no-op.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Identify slices of `as_seq` with `as_seq_sub` on both sides of `i`.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety: only hs[lv]'s elements and the slot at `lv` change
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness: the new abstract view is the high-level insertion
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper for the even-index case of `insert_`: when `j` is even,
// inserting one element does not change the parent index (j/2 = (j+1)/2).
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = () // discharged by SMT arithmetic
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper for the odd-index case of `insert_`: when `j` is odd,
// insertion bumps the parent index ((j+1)/2 = j/2 + 1), the parent index
// stays in range, and the level-`lv` vector is non-empty (j - offset_of i > 0).
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = () // discharged by SMT arithmetic
#pop-options
// Rearranges a 4-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Pure bookkeeping over `loc_union_assoc` (union is commutative/associative).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for the recursive case of `insert_`: the union of the
// level-`lv` footprint and the recursive-call footprint (levels above `lv`)
// equals the footprint stated in `insert_`'s postcondition for level `lv`.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Peel off level `lv` from both the vector-slot and element footprints.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: a `modifies l1` fact can be widened to any union containing l1.
// Used in the even (base) case of `insert_` to match the postcondition shape.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence fact: in `snoc s v`, index (length s - 1) is the last element of
// the original `s` (the element just before the appended `v`).
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = () // discharged by SMT
// The full rvector invariant implies the regionality of any sub-range of its
// elements; stated as a lemma so it can be invoked on a specific [i, j).
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = () // follows from `rv_inv`
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0             h0 h1 h2            ====>    h0 h1 h2 h3
// 1             h01                          h01 h23
// 2                                          h03
//
// Note: `acc` doubles as the accumulator, so its contents are overwritten;
// recursion proceeds upward (lv+1) only when `j` is odd (a pair completes).
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
    (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // Steps 1–2 (copy-push `acc` into hs[lv]) are done by the helper.
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (// Odd case: a pair is complete — compress and recurse upward.
       insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       // acc := hash(left_sibling, acc); the left sibling is the element
       // just before the one pushed by `hash_vv_insert_copy`.
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (// Even case: no pair completes — the copy-push at level `lv` is all.
       insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the global
// index (offset + j + 1) still fits in 64 bits.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Declaration only here; the implementation follows. Correctness is stated
// against the high-level `MTH.mt_insert` on the lifted tree.
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let dmt = B.get h0 mt 0 in
      mt_safe h0 mt /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (B.frameOf mt) (B.frameOf v) /\
      MT?.hash_size dmt = Ghost.reveal hsz /\
      mt_insert_pre_nst dmt v))
    (ensures (fun h0 _ h1 ->
      // memory safety: the tree footprint plus `v`'s region may change
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf v)))
               h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
      mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v))) | [] | MerkleTree.Low.mt_insert | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
hsz: FStar.Ghost.erased MerkleTree.Low.Datastructures.hash_size_t ->
mt: MerkleTree.Low.mt_p ->
v: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 11,
"end_line": 1018,
"start_col": 24,
"start_line": 964
} |
FStar.Pervasives.Lemma | val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1 | val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 = | false | null | true | assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.mt_safe_elts_preserved",
"MerkleTree.Low.__proj__MT__item__hash_size",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.__proj__MT__item__j",
"Prims.unit",
"LowStar.Vector.loc_vector_within_included",
"MerkleTree.Low.Datastructures.hash_vec",
"LowStar.Vector.size_of",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.RVector.rv_inv_preserved",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hvreg",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.mt_loc",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.loc_vector",
"LowStar.RVector.loc_rvector",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.get",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt)) | [] | MerkleTree.Low.mt_safe_preserved | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mt: MerkleTree.Low.mt_p ->
p: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.mt_safe h0 mt /\
LowStar.Monotonic.Buffer.loc_disjoint p (MerkleTree.Low.mt_loc mt) /\
LowStar.Monotonic.Buffer.modifies p h0 h1)
(ensures
LowStar.Monotonic.Buffer.get h0 mt 0 == LowStar.Monotonic.Buffer.get h1 mt 0 /\
MerkleTree.Low.mt_safe h1 mt) | {
"end_col": 73,
"end_line": 253,
"start_col": 2,
"start_line": 242
} |
Prims.GTot | val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0 | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p)) | val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p = | false | null | false | B.live h p /\ B.freeable p /\ V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h
(phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\ HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\ HH.disjoint mtr (B.frameOf p)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial"
] | [
"FStar.Monotonic.HyperStack.mem",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"Prims.l_and",
"LowStar.Monotonic.Buffer.live",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.Vector.live",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.phashes",
"LowStar.Vector.freeable",
"FStar.HyperStack.ST.is_eternal_region",
"LowStar.Vector.frameOf",
"LowStar.Vector.forall_all",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hreg",
"Prims.b2t",
"FStar.Monotonic.HyperHeap.includes",
"LowStar.Regional.__proj__Rgl__item__region_of",
"FStar.Monotonic.HyperHeap.extends",
"LowStar.Monotonic.Buffer.frameOf",
"FStar.Monotonic.HyperHeap.disjoint"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
    (requires (fun _ -> true))
    (ensures (fun h0 mt h1 ->
      let dmt = B.get h1 mt 0 in
      // memory safety
      B.frameOf mt = r /\
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      mt_not_full h1 mt /\
      // correctness: the low-level tree lifts to the high-level empty tree
      MT?.hash_size dmt = hash_size /\
      MT?.offset dmt = 0UL /\
      merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Each component (`hs`, `rhs`, `mroot`) is allocated in its own fresh
  // sub-region of `r`, keeping their footprints pairwise disjoint.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` (distinct region) does not disturb `hs`.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Fresh tree: i = j = 0, `rhs` marked stale (`false`), offset 0.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees all tree-owned state: the per-level hash vectors, the `rhs` cache,
// the cached Merkle root, and finally the tree pointer itself.
val mt_free: mt:mt_p ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt))
    (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Updating index `i` of an rvector's representation sequence equals
// splitting the sequence at `i` and splicing `v` between the two halves.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slices of the updated sequence outside position `i` are unchanged.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Identify those slices with `as_seq_sub` of the corresponding sub-ranges.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety: only level `lv` of `hs` (contents + slot) is touched
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness: level `lv` gained exactly `v` at the end
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  // Proof outline: step 1 builds the extended level vector (`ihv`) off to the
  // side; step 2 assigns it back into `hs[lv]`. Each step carries its own
  // modifies/preservation/invariant/correctness obligations.
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic fact: for an even `j`, inserting one element does not change
// the parent-level index (`j / 2`).
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
// Arithmetic fact: for an odd `j`, the parent-level index advances by one,
// stays within the level bound, and the level-`lv` vector is non-empty.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a four-way location union: (a + b) + (c + d) == (a + c) + (b + d).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  // Chain of associativity/commutativity steps via `loc_union_assoc`.
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for `insert_`: the union of (footprint of one step at
// level `lv`) and (footprint of the recursive call on levels above `lv`)
// equals the footprint claimed for the whole range [lv, size_of hs).
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the [lv, size) footprints into the head level and the tail range.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// `modifies` is monotone in its footprint: a footprint may be weakened to
// any superset built by union.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// After `snoc`, the element at the old last position is still the old last
// element of `s`.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The full rvector invariant implies the region-validity of any sub-range
// of its elements.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION         AFTER INSERTION
// lv
// 0   h0  h1  h2        ====>  h0  h1  h2  h3
// 1   h01                      h01 h23
// 2                            h03
//
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety: touches levels [lv, size) of `hs` and the `acc` region
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness: matches the high-level specification `MTH.insert_`
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Implementation: push `acc` onto level `lv`; if `j` is odd the level pair
// is complete, so fold the pair into `acc` and recurse one level up.
// Otherwise the insertion stops at this level (base case).
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Insertion precondition (non-stateful): the tree is not full and bumping
// the global position `offset + (j + 1)` does not overflow 64 bits.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)

// Stateful wrapper: dereference the (const) tree pointer and check the
// non-stateful precondition above.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let dmt = B.get h0 mt 0 in
      mt_safe h0 mt /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (B.frameOf mt) (B.frameOf v) /\
      MT?.hash_size dmt = Ghost.reveal hsz /\
      mt_insert_pre_nst dmt v))
    (ensures (fun h0 _ h1 ->
      // memory safety: touches the tree's footprint and the region of `v`
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf v)))
               h0 h1 /\
      mt_safe h1 mt /\
      // correctness: matches the high-level `MTH.mt_insert`
      MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
      mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
// Implementation: run `insert_` from level 0, then rewrite the tree record
// with `j + 1` and a stale `rhs` flag, re-establishing `mt_safe`.
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `insert_` only touched `hs` and the region of `v`; `rhs` and `mroot`
  // are disjoint from that footprint, hence preserved.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Commit the updated tree record: only `j` and the `rhs_ok` flag change.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the record pointer does not disturb the regional components.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
    (requires (fun h0 ->
      Rgl?.r_inv (hreg hsz) h0 init /\
      HH.disjoint r (B.frameOf init)))
    (ensures (fun h0 mt h1 ->
      // memory safety
      modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
      mt_safe h1 mt /\
      // correctness: lifts to the high-level singleton tree
      MT?.hash_size (B.get h1 mt 0) = hsz /\
      mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
// Create an empty tree, then immediately insert `init` as the first leaf.
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
  | Path: hash_size:hash_size_t ->
          hashes:V.vector (hash #hash_size) ->
          path
type path_p = B.pointer path
type const_path_p = const_pointer path

// Ghost projection: the vector of hashes stored in the path pointed to by `p`.
private
let phashes (h:HS.mem) (p:path_p)
  : GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
  = Path?.hashes (B.get h p 0)

// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0 | [] | MerkleTree.Low.path_safe | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | h: FStar.Monotonic.HyperStack.mem -> mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.path_p
-> Prims.GTot Type0 | {
"end_col": 32,
"end_line": 1077,
"start_col": 2,
"start_line": 1069
} |
FStar.Pervasives.Lemma | val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1 | val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 = | false | null | true | assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt)) (B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Monotonic.Buffer.modifies_buffer_elim",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"Prims.unit",
"LowStar.RVector.as_seq_preserved",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.loc_buffer",
"LowStar.RVector.loc_rvector",
"Prims.eq2"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt)) | [] | MerkleTree.Low.mt_preserved | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mt: MerkleTree.Low.mt_p ->
p: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.mt_safe h0 mt /\
LowStar.Monotonic.Buffer.loc_disjoint p (MerkleTree.Low.mt_loc mt) /\
LowStar.Monotonic.Buffer.modifies p h0 h1)
(ensures
([@@ FStar.Pervasives.inline_let ]let _ = MerkleTree.Low.mt_safe_preserved mt p h0 h1 in
MerkleTree.Low.mt_lift h0 mt == MerkleTree.Low.mt_lift h1 mt)) | {
"end_col": 60,
"end_line": 321,
"start_col": 2,
"start_line": 309
} |
FStar.Pervasives.Lemma | val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1 | val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 = | false | null | true | assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_ mtr
(V.as_seq h0 (phashes h0 p))
0
(S.length (V.as_seq h0 (phashes h0 p)))
dl
h0
h1 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.path_safe_preserved_",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"Prims.unit",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p)) | [] | MerkleTree.Low.path_safe_preserved | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h0 mtr p /\
LowStar.Monotonic.Buffer.loc_disjoint dl (MerkleTree.Low.path_loc p) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1) (ensures MerkleTree.Low.path_safe h1 mtr p) | {
"end_col": 54,
"end_line": 1203,
"start_col": 2,
"start_line": 1199
} |
FStar.Pervasives.Lemma | val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)))))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v) | val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
let as_seq_sub_upd #a #rst #rg h rv i v = | false | null | true | Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv) 0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i)) (RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg
h
(V.as_seq h rv)
0
(U32.v (V.size_of rv))
(U32.v i + 1)
(U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"LowStar.Regional.regional",
"FStar.Monotonic.HyperStack.mem",
"LowStar.RVector.rvector",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"LowStar.Vector.size_of",
"LowStar.Regional.__proj__Rgl__item__repr",
"Prims._assert",
"Prims.eq2",
"FStar.Seq.Base.index",
"FStar.Seq.Base.upd",
"LowStar.RVector.as_seq",
"FStar.UInt32.v",
"Prims.unit",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"FStar.Integers.op_Plus",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"LowStar.RVector.as_seq_sub",
"FStar.UInt32.__uint_to_t",
"LowStar.RVector.as_seq_seq_slice",
"LowStar.Vector.as_seq",
"FStar.Seq.Properties.slice_upd"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 20,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)))))) | [] | MerkleTree.Low.as_seq_sub_upd | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
rv: LowStar.RVector.rvector rg ->
i: LowStar.Vector.uint32_t{i < LowStar.Vector.size_of rv} ->
v: Rgl?.repr rg
-> FStar.Pervasives.Lemma (requires LowStar.RVector.rv_inv h rv)
(ensures
FStar.Seq.Base.equal (FStar.Seq.Base.upd (LowStar.RVector.as_seq h rv) (FStar.UInt32.v i) v)
(FStar.Seq.Base.append (LowStar.RVector.as_seq_sub h rv 0ul i)
(FStar.Seq.Properties.cons v
(LowStar.RVector.as_seq_sub h rv (i + 1ul) (LowStar.Vector.size_of rv))))) | {
"end_col": 70,
"end_line": 419,
"start_col": 2,
"start_line": 409
} |
FStar.Pervasives.Lemma | val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1) | val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 = | false | null | true | if i = j
then ()
else
(assert (loc_includes (B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims.op_Equality",
"Prims.bool",
"MerkleTree.Low.path_safe_preserved_",
"FStar.Integers.op_Subtraction",
"Prims.unit",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.index",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Regional.__proj__Rgl__item__region_of"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` is preserved by any modification that is
// disjoint from the vector structure of `hs`. Registered as an SMT pattern
// so it fires automatically across heap updates.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;  // the level-`lv` vector is unchanged
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: `hs`, `rhs` and `mroot` live in child regions of the
  // tree pointer's region, pairwise disjoint from each other.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the whole tree invariant: modifications disjoint from
// `mt_loc mt` preserve both `mt_safe` and the stored tree value itself.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // `mt_loc` covers the tree pointer and each component, so `p` is disjoint
  // from all of them; then apply the component framing lemmas one by one.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// Relates the low-level size invariant `mt_safe_elts` to the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted sequence view.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lifting of a low-level `merkle_tree` value to its high-level
// (sequence-based) counterpart `MTH.merkle_tree`. The preconditions are the
// component invariants needed for the lifted tree to be well-formed.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Establish `hs_wf_elts` so the refined return type is satisfied.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Lift a tree *pointer* (dereferenced in `h`) to the high-level tree.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing for the lifted view: a modification disjoint from `mt_loc mt`
// leaves the high-level representation of the tree unchanged.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // Each component location is included in the tree's region closure,
  // so its representation is preserved across (h0, h1).
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Each component gets its own fresh child region of `r`, which yields the
  // pairwise disjointness required by `mt_safe`.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;  // freshly allocated levels are all empty
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` does not disturb `hs` (disjoint regions).
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Finally allocate the tree record itself in `r`.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees every component of the tree (`hs`, `rhs`, `mroot`) and then the
// tree record itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence lemma: updating index `i` of the representation sequence equals
// splicing (prefix up to `i`) ++ (new value) ++ (suffix from `i + 1`).
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // The update leaves both the prefix slice and the suffix slice unchanged.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Identify those slices with `as_seq_sub` views of the rvector.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
//
// The postcondition pins down: (1) the modified footprint (the level-`lv`
// element plus the vector slot at `lv`), (2) preservation of the invariants
// of `hs` and `v`, (3) the new size of `hs[lv]`, and (4) agreement with the
// high-level `MTH.hashess_insert` on the lifted sequences.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper for the even-index case of `insert_`: when `j` is even,
// inserting one element does not change the parent index (`j / 2`).
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper for the odd-index case of `insert_`: when `j` is odd,
// inserting one element bumps the parent index (`(j + 1) / 2 = j / 2 + 1`),
// the parent index fits at level `lv + 1`, and the level is non-empty.
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearrangement of a 4-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for `insert_`: the union of the modified locations of
// one insertion step at level `lv` and of the recursive call at `lv + 1`
// collapses into the single footprint stated in `insert_`'s postcondition
// (all element locations and vector slots from `lv` up, plus `aloc`).
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Unfold both the vector-slot range and the element range one step.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: `modifies l1` implies `modifies ((l1 ∪ l2) ∪ l3)`.
// Used in the even-index branch of `insert_`, whose actual footprint is
// smaller than the stated one.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence helper: the element at the old last position of `snoc s v`
// is the last element of `s`.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// `rv_inv` implies the region invariant of any element sub-range.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0             h0   h1   h2            h0   h1   h2   h3
// 1             h01                     h01  h23
// 2                                     h03
//
// At each level the new element is pushed via `hash_vv_insert_copy`; when
// `j` is odd the level becomes complete, so the accumulator is compressed
// with the level's second-to-last hash and insertion recurses at `lv + 1`.
// Note that `acc` is mutated in place: it serves as the running accumulator.
// The postcondition relates the result to the high-level `MTH.insert_`.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Pure (non-stateful) insertion precondition: the tree is not full and the
// 64-bit offset plus the incremented index does not overflow.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and runs the pure check.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
//
// It runs `insert_` from level 0 and then writes back the tree record with
// `j` incremented and `rhs_ok` cleared; the remaining proof steps re-establish
// `mt_safe` across the record update.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `insert_` only touched `hs` and `v`'s region; frame `rhs` and `mroot`.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Write back the record with `j + 1`; `rhs` becomes stale.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // The record update only touches the tree pointer; frame every component.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Implemented as `create_empty_mt` followed by one `mt_insert`.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A `path` pairs a hash size with a vector of hash pointers (the siblings
// along a root-to-leaf authentication path).
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable, every hash in it satisfies the hash invariant and
// lives inside the tree's region `mtr`, and the path's own region is
// disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// The abstract location of a path: all regions under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lifting of a sub-sequence [i, j) of hash pointers to a high-level
// path (sequence of hash representations), built back-to-front by recursion
// on `j`.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole stored hash vector.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th lifted element is the
// representation of the k-th pointer. Registered as an SMT pattern.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j) | [
"recursion"
] | MerkleTree.Low.path_safe_preserved_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtr: FStar.Monotonic.HyperHeap.rid ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat{i <= j && j <= FStar.Seq.Base.length hs} ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_seq hs
i
j
(fun hp ->
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h0 hp /\
FStar.Monotonic.HyperHeap.includes mtr
(Rgl?.region_of (MerkleTree.Low.Datastructures.hreg hsz) hp)) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
LowStar.Vector.forall_seq hs
i
j
(fun hp ->
Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h1 hp /\
FStar.Monotonic.HyperHeap.includes mtr
(Rgl?.region_of (MerkleTree.Low.Datastructures.hreg hsz) hp)))
(decreases j) | {
"end_col": 54,
"end_line": 1188,
"start_col": 2,
"start_line": 1182
} |
FStar.HyperStack.ST.ST | val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v | val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v = | true | null | false | let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_insert_pre_nst",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_insert_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.const_mt_p -> v: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 24,
"end_line": 935,
"start_col": 29,
"start_line": 932
} |
FStar.Pervasives.Lemma | val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)) | val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
let insert_modifies_rec_helper #hsz lv hs aloc h = | false | null | true | assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc
(loc_union (loc_union (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc (loc_union (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc
aloc;
loc_union_assoc (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.loc_union_assoc_4",
"LowStar.RVector.rs_loc_elem",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.Vector.as_seq",
"FStar.UInt32.v",
"LowStar.Vector.loc_vector_within",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"LowStar.RVector.rv_loc_elems",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_union_assoc",
"LowStar.Monotonic.Buffer.loc_union",
"Prims._assert",
"Prims.eq2",
"LowStar.RVector.rs_loc_elems_rec_inverse"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc) | [] | MerkleTree.Low.insert_modifies_rec_helper | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv < MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
aloc: LowStar.Monotonic.Buffer.loc ->
h: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(ensures
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union
(LowStar.RVector.rs_loc_elem (MerkleTree.Low.Datastructures.hvreg hsz)
(LowStar.Vector.as_seq h hs)
(FStar.UInt32.v lv))
(LowStar.Vector.loc_vector_within hs lv (lv + 1ul)))
aloc)
(LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rv_loc_elems
h
hs
(lv + 1ul)
(LowStar.Vector.size_of hs))
(LowStar.Vector.loc_vector_within hs (lv + 1ul) (LowStar.Vector.size_of hs)))
aloc) ==
LowStar.Monotonic.Buffer.loc_union (LowStar.Monotonic.Buffer.loc_union (LowStar.RVector.rv_loc_elems
h
hs
lv
(LowStar.Vector.size_of hs))
(LowStar.Vector.loc_vector_within hs lv (LowStar.Vector.size_of hs)))
aloc) | {
"end_col": 54,
"end_line": 659,
"start_col": 2,
"start_line": 625
} |
FStar.Pervasives.Lemma | val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1 | val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 = | false | null | true | assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p)) 0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"MerkleTree.Low.path_preserved_",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.as_seq",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.phashes",
"FStar.Seq.Base.length",
"Prims.unit",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a)) | [] | MerkleTree.Low.path_preserved | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h0 mtr p /\
LowStar.Monotonic.Buffer.loc_disjoint dl (MerkleTree.Low.path_loc p) /\
LowStar.Monotonic.Buffer.loc_disjoint dl
(LowStar.Monotonic.Buffer.loc_all_regions_from false mtr) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
([@@ FStar.Pervasives.inline_let ]let _ =
MerkleTree.Low.path_safe_preserved mtr p dl h0 h1
in
let hsz0 = Path?.hash_size (LowStar.Monotonic.Buffer.get h0 p 0) in
let hsz1 = Path?.hash_size (LowStar.Monotonic.Buffer.get h1 p 0) in
let b = MerkleTree.Low.lift_path h0 mtr p in
let a = MerkleTree.Low.lift_path h1 mtr p in
hsz0 = hsz1 /\ FStar.Seq.Base.equal b a)) | {
"end_col": 12,
"end_line": 1263,
"start_col": 2,
"start_line": 1259
} |
Prims.Tot | val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul) | val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root = | false | null | false | offsets_connect (MT?.offset mtv) idx && Path?.hash_size p = MT?.hash_size mtv &&
([@@ inline_let ]let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv && V.size_of (Path?.hashes p) = 0ul) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"total"
] | [
"MerkleTree.Low.merkle_tree",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.path",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"Prims.op_AmpAmp",
"MerkleTree.Low.offsets_connect",
"MerkleTree.Low.__proj__MT__item__offset",
"Prims.op_Equality",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.__proj__Path__item__hash_size",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Integers.op_Less",
"MerkleTree.Low.__proj__MT__item__j",
"FStar.UInt32.t",
"LowStar.Vector.size_of",
"MerkleTree.Low.__proj__Path__item__hashes",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"Prims.bool"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
// A `const_pointer a` is a read-only view of exactly one cell: a const buffer
// of length 1 whose underlying qualifier is MUTABLE (the owner may still write).
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
// In-tree indices are 32-bit; absolute element positions (`offset_t`) are 64-bit.
type index_t = uint32_t

// Maximum 32-bit value, as a 32-bit literal (bound on in-tree indices).
let uint32_32_max = 4294967295ul
inline_for_extraction
// Maximum 32-bit value, widened to a 64-bit literal.
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
// An offset and an index may only be combined when their difference fits in 32 bits.
let offset_range_limit = uint32_max

type offset_t = uint64_t
// Width casts between the 32-bit index world and the 64-bit offset world.
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
// `offsets_connect x y` holds when `y` is at or after `x` and their distance
// fits in 32 bits, so `y` can be expressed as `x` plus a 32-bit index.
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
// Convert an absolute 64-bit offset into a 32-bit in-tree index relative to
// the tree's base offset; the refinement guarantees the difference fits.
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
  [@inline_let] let diff = U64.sub_mod index tree in
  assert (diff <= offset_range_limit);
  Int.Cast.uint64_to_uint32 diff
// True when `x + i` does not overflow a 64-bit offset.
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
// Inverse of `split_offset`: rebuild the absolute offset from base + index.
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
  U64.add tree (u32_64 i)

// Number of levels in the hash store: 32, matching the 32-bit index width.
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate some merkle paths that need the rightmost hashes
//        as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
//          root of the tree. If `rhs_ok` is true then it has the up-to-date
//          root value.
// `hash_spec`/`hash_fun`: the (erased) specification of the two-to-one hash
//          and its Low* implementation, fixed per tree.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree

// Mutable and constant pointers to a tree, the types exposed to clients.
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
// Boolean (runtime-checkable) sanity conditions on the fields of a candidate
// tree: ordered indices, no offset overflow, and full-size level vectors.
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
  j >= i && add64_fits offset j &&
  V.size_of hs = merkle_tree_size_lg &&
  V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
// `_nst` variant operates on a dereferenced tree value (no stateful read).
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max

// Heap-level wrapper: reads the tree out of memory `h` before checking.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety

// Round an index down to the nearest even number: the first slot of the pair
// that `i` belongs to at a given level.
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element. Defined by recursion up the levels, halving `i` and `j` each step;
// terminates because `lv` strictly increases toward `merkle_tree_size_lg`.
inline_for_extraction noextract
val mt_safe_elts:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
  if lv = merkle_tree_size_lg then true
  else (let ofs = offset_of i in
       // the level-`lv` vector holds exactly the live range [offset_of i, j)
       V.size_of (V.get h hs lv) == j - ofs /\
       mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction lemma for `mt_safe_elts`: from the head condition at `lv` and
// the recursive condition at `lv + 1`, conclude `mt_safe_elts` at `lv`.
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination lemma: `mt_safe_elts` at `lv` gives the head size condition.
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination lemma: `mt_safe_elts` at `lv` gives the condition one level up.
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// An all-empty hash store trivially satisfies `mt_safe_elts` with i = j = 0;
// proved by recursion over the remaining levels.
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                  (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` survives any modification disjoint from the
// hash-store vectors. Registered as an SMT pattern so it fires automatically.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: hs, rhs, and mroot live in sub-regions of the tree's region,
  // pairwise disjoint from each other.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the whole-tree invariant: a modification disjoint from
// `mt_loc mt` preserves both the stored tree value and `mt_safe`.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // `mt_loc` covers the root pointer and each component, so disjointness from
  // `mt_loc` lets us frame every sub-invariant separately.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
         (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// `mt_safe_elts` on the low-level store implies the high-level well-formedness
// predicate `MTH.hs_wf_elts` on its sequence view; by recursion over levels.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Lift a low-level tree value to its high-level (specification) counterpart,
// reading every component through memory `h`; ghost-only (GTot).
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // needed to establish the `mt_wf_elts` refinement on the result
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Pointer-level lift: dereference the tree in `h`, then lift the value.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing of the lifted view: a modification disjoint from the tree keeps the
// high-level lift of the tree unchanged (not just the invariant).
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // each component's sequence view is framed separately
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                      (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
// Allocates the hash store, the rightmost-hash store, and the root cell in
// three fresh sub-regions of `r`, then allocates the tree record itself.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // allocating `rhs` did not touch `hs`: re-establish its invariants
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Releases both hash stores, the root cell, and finally the tree record.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence-level lemma: updating index `i` of an rvector's sequence view is
// the same as (prefix before i) ++ (v :: suffix after i).
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // relate `upd` to slices on both sides of `i`, then identify those slices
  // with `as_seq_sub`
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// The proof proceeds in two phases: (1) build the extended level vector
// `ihv` (not yet linked into `hs`), (2) assign it back into `hs[lv]`;
// after each phase the modifies clause, the rvector invariant, and
// `mt_safe_elts` for the untouched levels are re-established.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper for the even-index insertion case: inserting at an even
// `j` does not change the parent index (`j / 2 = (j + 1) / 2`).
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper for the odd-index insertion case: the parent index
// advances by one ((j + 1) / 2 = j / 2 + 1), its bound still holds one level
// up, and the current level is non-empty (j - offset_of i > 0).
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Four-way reassociation of `loc_union`: (a ∪ b) ∪ (c ∪ d) = (a ∪ c) ∪ (b ∪ d),
// proved by chaining the binary associativity lemma.
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Location-algebra helper for `insert_`'s modifies clause: the footprint of
// one insertion step at level `lv` combined with the recursive footprint for
// levels above equals the single footprint over levels [lv, size_of hs),
// each unioned with the accumulator location `aloc`.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // unfold both range footprints into head-at-lv plus tail-from-(lv+1)
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening lemma: a `modifies l1` footprint may be enlarged to
// `(l1 ∪ l2) ∪ l3` via `loc_includes`.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence helper: after snoc-ing `v`, the element at the old last position
// is still the old last element of `s`.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The full rvector invariant implies the regionality of any sub-range of its
// elements; follows directly from the definition of `rv_inv`.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//     BEFORE INSERTION         AFTER INSERTION
// lv
// 0   h0  h1  h2        ====>  h0  h1  h2  h3
// 1   h01                      h01 h23
// 2                            h03
//
private
// Contract: `acc` is both the hash being inserted at level `lv` and the
// accumulator for compressed hashes pushed to higher levels; `hash_fun`
// must implement the ghost `hash_spec`.  Postcondition ties the low-level
// state to the high-level spec `MTH.insert_`.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// NOTE(review): heavily hint-laden proof; the `assert`s and lemma calls below
// are SMT guidance and their order matters — do not reorder or remove.
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // 1) Append a copy of `acc` at the head of level `lv`.
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in
  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
  // 2) Odd `j`: the new element pairs with its left sibling, so compress
  //    them into `acc` and recurse one level up with (i/2, j/2).
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
    assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
    let lvhs = V.index hs lv in
    assert (U32.v (V.size_of lvhs) ==
           S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
    assert (V.size_of lvhs > 1ul);

    /// 3) Update the accumulator `acc`.
    hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
    assert (Rgl?.r_inv (hreg hsz) hh1 acc);
    // Compress the left sibling (second-to-last element) with `acc`.
    hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
    let hh2 = HST.get () in

    // 3-1) For the `modifies` postcondition
    assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
    assert (modifies
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             hh0 hh2);

    // 3-2) Preservation
    RV.rv_inv_preserved
      hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
    assert (RV.rv_inv hh2 hs);
    assert (Rgl?.r_inv (hreg hsz) hh2 acc);

    // 3-3) For `mt_safe_elts`
    V.get_preserved hs lv
      (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
    mt_safe_elts_preserved
      (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
      (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

    // 3-4) Correctness
    insert_snoc_last_helper
      (RV.as_seq hh0 (V.get hh0 hs lv))
      (Rgl?.r_repr (hreg hsz) hh0 acc);
    assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                    ((Ghost.reveal hash_spec)
                      (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                      (Rgl?.r_repr (hreg hsz) hh0 acc)));

    /// 4) Recursion
    insert_ (lv + 1ul)
      (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
      hs acc hash_fun;
    let hh3 = HST.get () in

    // 4-0) Memory safety brought from the postcondition of the recursion
    assert (RV.rv_inv hh3 hs);
    assert (Rgl?.r_inv (hreg hsz) hh3 acc);
    assert (modifies (loc_union
                       (loc_union
                         (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                         (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                       (B.loc_all_regions_from false (B.frameOf acc)))
                     hh2 hh3);
    assert (modifies
             (loc_union
               (loc_union
                 (loc_union
                   (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                   (V.loc_vector_within hs lv (lv + 1ul)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               (loc_union
                 (loc_union
                   (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc))))
             hh0 hh3);

    // 4-1) For `mt_safe_elts`
    rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
    RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
    assert (loc_disjoint
             (V.loc_vector_within hs lv (lv + 1ul))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
    assert (loc_disjoint
             (V.loc_vector_within hs lv (lv + 1ul))
             (B.loc_all_regions_from false (B.frameOf acc)));
    V.get_preserved hs lv
      (loc_union
        (loc_union
          (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
          (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
        (B.loc_all_regions_from false (B.frameOf acc)))
      hh2 hh3;
    assert (V.size_of (V.get hh3 hs lv) ==
           j + 1ul - offset_of (Ghost.reveal i)); // head preserved
    assert (mt_safe_elts hh3 (lv + 1ul) hs
             (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
    mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
    assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

    // 4-2) Correctness
    mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                      (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
    mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
    MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
      (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                      (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  // Even `j`: no compression needed at this level; insertion is complete
  // after the copy in step 1.
  else (insert_index_helper_even lv j;
    // memory safety
    assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
    mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
    assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
    assert (modifies
             (loc_union
               (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
               (V.loc_vector_within hs lv (lv + 1ul)))
             hh0 hh1);
    insert_modifies_union_loc_weakening
      (loc_union
        (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
        (V.loc_vector_within hs lv (lv + 1ul)))
      (B.loc_all_regions_from false (B.frameOf acc))
      (loc_union
        (loc_union
          (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
          (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
        (B.loc_all_regions_from false (B.frameOf acc)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
    MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
      (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
    assert (S.equal (RV.as_seq hh1 hs)
                    (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                      (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and the global
// index (offset + j + 1) still fits in 64 bits.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and runs the pure check.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // Trivial assert; serves as a typing hint relating the runtime hash size
  // to itself so `v` checks against `mt_insert_pre_nst`'s dependent type.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  // Shadow the ghost `hsz` with the concrete runtime hash size.
  let hsz = MT?.hash_size mtv in
  // Do the actual insertion starting from level 0; `v` is consumed as the
  // accumulator.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  // `rhs` and `mroot` live in regions disjoint from what `insert_` touched,
  // so their invariants and representations are preserved.
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached rightmost hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the record itself only touches the tree pointer's buffer; all
  // region-allocated components are untouched.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initializes a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element, so the freshly
// created empty tree immediately receives `init` via `mt_insert` (which
// also consumes `init` as an accumulator).
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size paired with a vector of (borrowed) hash pointers.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
// Read-only (const) pointer alias used by query-style APIs.
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p`
// in memory `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable, every hash in the vector is valid and lives inside
// the Merkle tree region `mtr`, the vector's region extends the path's
// frame, and the path is disjoint from the tree.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything allocated under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lift a slice [i, j) of a sequence of low-level hashes to the high-level
// (spec) path representation, reading each hash's value in memory `h`.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole hash vector of `p` to the
// high-level spec path.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Index correspondence for `lift_path_`: the k-th lifted element equals the
// representation of the k-th low-level hash.  Registered as an SMT pattern.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Index correspondence for a whole path: the i-th element of the lifted
// path is the representation of the i-th stored hash.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                  Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                  S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence: lifting equal slices of two hash sequences yields equal
// spec paths.  Proved by a chain of pointwise asserts feeding the SMT
// pattern of `lift_path_index_`.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Frame lemma (sequence form): validity and region-containment of each hash
// in the slice [i, j) survive a modification disjoint from the tree region
// `mtr`.  Proof walks the slice right-to-left, framing one hash at a time.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma: `path_safe` survives any modification disjoint from both the
// path's footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Frame lemma for an empty path: with zero hashes there is no per-element
// obligation, so disjointness from the path footprint alone suffices.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Frame lemma (sequence form) for the lifted representation: a modification
// disjoint from the tree region leaves the lifted slice [i, j) unchanged.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Frame lemma for a whole path: a modification disjoint from both the path
// footprint and the tree region preserves the path's hash size and its
// lifted (spec-level) representation.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocate a fresh, empty path in region `r` (disjoint from the tree
// region `mtr`); its hash vector lives in a new sub-region of `r`.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to length zero.  Only the size is cleared (via `V.clear`);
// the underlying storage is retained for reuse.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Free the path's vector and the path pointer itself.  The hashes the
// vector pointed to are owned by the Merkle tree and are NOT freed here.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path

// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// `actd` ("accumulator active") records whether `acc` currently holds a
// meaningful partial hash; when a level has an unpaired rightmost element,
// it is either combined into `acc` (if active) or copied into `acc`.
// The postcondition matches the high-level `MTH.construct_rhs` spec.
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
      HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
      mt_safe_elts #hsz h0 lv hs i j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.loc_rvector rhs)
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 rhs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs i j;
      MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
        (U32.v lv)
        (Rgl?.r_repr (hvvreg hsz) h0 hs)
        (Rgl?.r_repr (hvreg hsz) h0 rhs)
        (U32.v i) (U32.v j)
        (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
      (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
      )))
    (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// NOTE(review): proof hints below (asserts and preservation lemmas) guide
// Z3 and are order-sensitive; keep them intact.
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in

  // Base case: `j = 0` — nothing to construct at or above this level.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    // Even `j`: every element at this level is paired; recurse directly.
    (if j % 2ul = 0ul
    then begin
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    // Odd `j`: the rightmost element at this level is unpaired.
    else begin
      if actd
      then begin
        // Accumulator is active: record it in `rhs[lv]`, then fold the
        // unpaired element into `acc`.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);

        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                        (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                               (Rgl?.r_repr (hreg hsz) hh0 acc)));

        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;

        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // Accumulator inactive: copy the unpaired element into `acc`
        // (activating it for the recursive call).
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;

        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                      (if actd
                      then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                 (Rgl?.r_repr (hreg hsz) hh0 acc)
                      else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));

      // Recurse one level up with an active accumulator.
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_root`; currently unconditional.
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper around `mt_get_root_pre_nst`: dereferences the const
// tree pointer and runs the pure check.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
   (requires (fun h0 ->
     let mt = CB.cast mt in
     MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  // Typing hint so `rt` checks against the runtime hash size.
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  // Fast path: cached rightmost hashes are valid, so the stored root can
  // simply be copied out into `rt`.
  if MT?.rhs_ok mtv
  then begin
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  // Slow path: (re)build `rhs` from scratch; `rt` ends up holding the root.
  else begin
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;

    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));

    // Cache the freshly computed root in `mroot`.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;

    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);

    // Mark the cached `rhs`/`mroot` as valid.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);

    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
// `mt_path_insert` appends a single hash `hp` to the Merkle path `p`.
// Preconditions: the path's hash vector is not full, `hp` satisfies the
// regional invariant and lives in a region included in the tree region
// `mtr`, `mtr` is disjoint from the region of `p` itself, and the path's
// recorded hash size matches `hsz`.
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety: only the path's locations are modified, and it stays safe
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness: the path grows by exactly one hash, the hash size is
// unchanged, and lifting the result matches the high-level `MTH.path_insert`
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// Insert `hp` at the end of the path's hash vector.
let ipv = V.insert pv hp in
let hh1 = HST.get () in
// memory safety: the insertion only touches regions rooted at `ipv`'s
// frame, so the previously stored path hashes and `hp` are preserved.
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
// Store the extended vector back into the path pointer.
p *= Path hsz ipv;
let hh2 = HST.get () in
// Writing `p` modifies only its own region, which is disjoint from `mtr`
// (precondition), so the hashes reachable from `mtr` are untouched.
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
// correctness: lifting the updated path equals the old lifted sequence
// with `hp` appended (`S.snoc`).
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Number of hashes (0 or 1) that one tree level contributes to the path;
// the refinement ties the result to the high-level
// `MTH.mt_path_length_step`.
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
// `k` even: a sibling exists unless `k` is the last element of the
// level, or the one just before it with no accumulated rightmost hash.
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
// `k` odd: the left sibling always exists.
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
// Total Merkle path length from level `lv` upwards: the per-level step
// plus the length for the next level with halved indices.  The refinement
// bounds the result by the remaining tree height (32 - lv) and equates it
// with the high-level `MTH.mt_path_length`.
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
// Once `j` is odd at some level, a rightmost hash exists from then on,
// so `actd` becomes (and stays) true for the recursive call.
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Getter: the number of hashes currently stored in the path `p`
// (accessed through a const pointer).
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
// `mt_make_path_step` pushes the sibling hash (at most one) for level `lv`
// onto the path `p`:
//  - `k` odd: the left sibling `hs[lv][k - 1 - ofs]`;
//  - `k` even and `k = j`: nothing;
//  - `k` even and `k + 1 = j`: the rightmost hash `rhs[lv]`, but only when
//    `actd` says one exists;
//  - `k` even otherwise: the right sibling `hs[lv][k + 1 - ofs]`.
// The number of inserted hashes is exactly `mt_path_length_step k j actd`.
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness: the lifted result matches `MTH.mt_make_path_step`
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
// `k` is a right child: insert the left sibling from `hs[lv]`.
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
// `k` is a left child: pick the right sibling, the rightmost hash,
// or nothing, depending on where `k` sits relative to `j`.
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
// Pure (non-stateful) precondition for `mt_get_path_step`: `i` must be a
// valid index into the path's hash vector.
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
// Stateful wrapper around `mt_get_path_step_pre_nst`: dereferences the
// const path pointer and runs the bounds check on the stored path value.
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the `i`-th hash stored in the path `p`; the precondition
// guarantees `i` is in bounds.
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// `mt_get_path_` builds the Merkle path for index `k`, walking the tree
// levels from `lv` upwards: at each non-empty level it pushes the sibling
// hash via `mt_make_path_step`, then recurses with all indices halved.
// The path grows by exactly `mt_path_length lv k j actd` hashes, and the
// lifted result matches the high-level `MTH.mt_get_path_`.
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
// Push this level's sibling hash, ...
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
// memory safety: only the path was modified, so the tree's invariants
// and contents carry over from hh0 to hh1.
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
// ... then recurse on the next level with halved indices; `actd`
// becomes true once some level has an odd number of elements.
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
// correctness: compose the step and the recursive call into the
// high-level `MTH.mt_get_path_` equation.
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
// Pure precondition check for retrieving a Merkle path from tree value
// `mtv` at offset `idx` into path `p` with root buffer `root`.
// NOTE(review): only the declaration is visible here; the defining `let`
// follows after this chunk.
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool | [] | MerkleTree.Low.mt_get_path_pre_nst | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtv: MerkleTree.Low.merkle_tree ->
idx: MerkleTree.Low.offset_t ->
p: MerkleTree.Low.path ->
root: MerkleTree.Low.Datastructures.hash
-> Prims.bool | {
"end_col": 36,
"end_line": 1990,
"start_col": 2,
"start_line": 1986
} |
FStar.HyperStack.ST.ST | val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i | val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i = | true | null | false | let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.mt_get_path_step_pre_nst",
"Prims.bool",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
//
// Postconditions guarantee: only `hs[lv]` and the vector cell holding it are
// modified; `hs[lv]` grows by exactly one; element-safety of levels above
// `lv` is preserved; and the result matches the high-level
// `MTH.hashess_insert` specification.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in
  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);
  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in
  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  // Conclude via the split/upd decomposition lemma above.
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Index arithmetic for the even case of `insert_`: when `j` is even,
// inserting one element at `j` does not change the parent-level index
// (`j / 2 == (j + 1) / 2`).  Discharged automatically by SMT.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
// Index arithmetic for the odd case of `insert_`: when `j` is odd, the
// parent-level index advances (`(j + 1) / 2 == j / 2 + 1`), the parent
// index still fits its level bound, and `hs[lv]` is non-empty
// (`j - offset_of i > 0`).  Discharged automatically by SMT.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Four-way interchange for `loc_union`:
// (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d),
// proved by chaining binary associativity (commutativity of `loc_union`
// is implicit in the SMT reasoning between the steps).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for `insert_`: the union of the one-step footprint
// (level `lv` element + its vector cell + `aloc`) and the recursive
// footprint (levels above `lv` + their cells + `aloc`) collapses to the
// advertised footprint of `insert_` itself (all levels from `lv` + `aloc`).
// `aloc` is the accumulator's region footprint at the call sites.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Unfold both "all levels from lv" footprints one step.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakens a `modifies l1` fact to `modifies ((l1 ∪ l2) ∪ l3)`, via
// `loc_includes` on the nested unions.  Used in the even branch of
// `insert_`, which touches only the one-step footprint but must satisfy
// the full recursive `modifies` postcondition.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// In `snoc s v`, the element at the old last position (`length s - 1`)
// is still `last s`.  Used by `insert_` to identify the hash that gets
// combined with the accumulator.  Discharged automatically by SMT.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The full rvector invariant implies the per-range region invariant for
// any sub-range [i, j].  Discharged automatically by SMT.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// Recursive insertion worker (see the diagram above).  Pushes `acc` onto
// `hs[lv]`; if the pre-insertion count `j` at this level is odd, the two
// rightmost hashes are combined with `hash_fun` into `acc` and insertion
// recurses at level `lv + 1`, otherwise it stops.  Proven equivalent to
// the high-level `MTH.insert_` specification.  NOTE: `acc` is mutated in
// place as the accumulator.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in
  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);
       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       // Combine the old rightmost hash with `acc` (the just-inserted hash).
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in
       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);
       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);
       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));
       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in
       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);
       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in
  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);
  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Runtime insertion precondition (non-ST): the tree is not full, and the
// 64-bit external index `offset + (j + 1)` does not overflow.  Note the
// short-circuit `&&`: `mt_not_full_nst` must hold for `j + 1ul` on the
// right-hand side to be in range.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst` for const tree pointers:
// dereferences the tree and checks the runtime insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // Trivial equality to help the checker relate the runtime hash size of
  // `mt` with the (erased) index of `v`'s type.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
// Top-level insertion: runs `insert_` from level 0 with `v` as the
// accumulator, then rewrites the tree record with `j := j + 1` and
// `rhs_ok := false`.  Proven equivalent to `MTH.mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  // Shadow the erased `hsz` with the runtime hash size stored in the tree.
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` are untouched by `insert_`: show their invariants and
  // representations survive its footprint.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Write back the updated tree record (only `j` and `rhs_ok` change).
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // The record write only touches the tree pointer's buffer; everything
  // else is preserved.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree in (a sub-region of) `r` with a custom hash size,
// ghost hash specification, and concrete hash function, then inserts the
// first element `init`.  Matches `MTH.mt_create`.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  // An empty tree is not a valid Merkle tree; insert the first leaf.
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: the hash size shared by all hashes on the path, paired
// with a vector of hash pointers.  The hashes point into the originating
// tree's regions (see `path_safe` for the manually stated invariants).
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
// Mutable and const pointer aliases for paths.
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector currently stored behind a path pointer
// in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory-safety invariant for a path w.r.t. the Merkle tree region `mtr`:
// the path pointer and its vector are live and freeable; every hash on the
// path satisfies the hash invariant and lives inside `mtr`; the vector's
// region extends the path's frame; and `mtr` is disjoint from the path's
// frame (so modifying the path cannot affect the tree, and vice versa).
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// The abstract footprint of a path: everything allocated from its frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-range [i, j) of a sequence of low-level hashes to the
// high-level path representation, by structural recursion on `j` (each
// step snocs the representation of the (j-1)-th hash).
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// High-level representation of a whole path: lift every hash currently
// stored in the path's vector.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th element of the
// lifted range is the representation of `hs[k]`.  Registered as an SMT
// pattern so indexing facts fire automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  // Peel the recursion of `lift_path_` until the snoc at position `k`.
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Pointwise characterization of `lift_path`: indexing the lifted path at
// `i` yields the representation of the i-th hash in the path's vector.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence for `lift_path_`: two hash sequences equal on the slice
// [i, j) lift to equal high-level paths on [i, j).  Proved by stating the
// pointwise characterizations of both sides (triggering the SMT pattern
// of `lift_path_index_`) and re-indexing them against the equal slices.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Framing for the hash range [i, j): if a modification is disjoint from
// all regions under `mtr`, every hash on the range keeps its invariant and
// region containment.  Induction on `j`, framing one hash per step.
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (// The (j-1)-th hash lives inside `mtr`, hence inside the protected
       // footprint; frame it and recurse on the prefix.
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Framing for `path_safe`: a modification disjoint from both the path's
// footprint and the tree region preserves the whole path invariant.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Framing for an empty path: no per-hash reasoning is needed (the vector
// has size 0), so disjointness from the path's footprint alone suffices.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Representation framing for the hash range [i, j): under the same
// disjointness as `path_safe_preserved_`, the lifted path is unchanged.
// Induction on `j`, framing one hash representation per step.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Representation framing for a whole path: a modification disjoint from
// the path and the tree region preserves both the stored hash size and
// the lifted high-level path.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in region `r` (disjoint from the tree
// region `mtr`): the hash vector lives in a new sub-region of `r`, and the
// lifted path is the empty sequence.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Empties a path in place: the vector is cleared (size set to 0; the hash
// pointers themselves belong to the tree and are not freed), keeping the
// same hash size.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees a path's own allocations: the hash vector's backing storage and
// the path pointer.  The hashes referenced by the vector are owned by the
// tree and are not freed here.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_get_path_step_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mtr: FStar.Monotonic.HyperHeap.rid -> p: MerkleTree.Low.const_path_p -> i: LowStar.Vector.uint32_t
-> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 41,
"end_line": 1877,
"start_col": 39,
"start_line": 1875
} |
FStar.Pervasives.Lemma | val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p))) | val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 = | false | null | true | assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.path_p",
"LowStar.Monotonic.Buffer.loc",
"FStar.Monotonic.HyperStack.mem",
"Prims._assert",
"LowStar.Monotonic.Buffer.loc_includes",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.path",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.phashes",
"Prims.unit",
"LowStar.Monotonic.Buffer.loc_buffer"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// `mt_safe_elts` on the low-level vectors implies the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted sequences.
// Recurses level by level (halving i and j), terminating at level 32.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                   mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lift of a low-level `merkle_tree` struct to the high-level
// specification tree `MTH.merkle_tree`, replacing buffers/vectors by their
// sequence representations. Requires the struct's component invariants.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to discharge the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Ghost lift of a tree *pointer*: dereference and lift the struct.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing lemma for the *representation*: a modification disjoint from the
// tree leaves the lifted high-level tree unchanged (not just `mt_safe`).
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                   loc_disjoint p (mt_loc mt) /\
                   modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                  mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  // The pointer cell is untouched, hence the struct value is the same.
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  // The sequence views of `hs`, `rhs` and the root contents are preserved.
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
//
// Allocates an empty tree in fresh sub-regions of `r`: one region for the
// level vectors `hs`, one for the right-hand hashes `rhs`, and one for the
// cached root. The interleaved lemma calls re-establish invariants after
// each allocation step (each `hh*` is a snapshot of the heap).
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
    (requires (fun _ -> true))
    (ensures (fun h0 mt h1 ->
      let dmt = B.get h1 mt 0 in
      // memory safety
      B.frameOf mt = r /\
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      mt_not_full h1 mt /\
      // correctness
      MT?.hash_size dmt = hash_size /\
      MT?.offset dmt = 0UL /\
      merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Level vectors: one (initially empty) hash vector per tree level.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Right-hand hashes, one per level, initialized to the region's default.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` touched only `rhs`'s region; `hs` is unaffected.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Cached Merkle root buffer.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // The tree struct itself: offset 0, indices i = j = 0, rhs not yet valid.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees the whole tree: level vectors, right-hand hashes, cached root,
// then the tree struct pointer itself.
val mt_free: mt:mt_p ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt))
    (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence algebra: updating index `i` of an rvector's representation is the
// same as splicing the new value between the prefix [0, i) and the suffix
// (i, size).  Used to relate `RV.assign` to high-level sequence updates.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Updating outside a slice leaves that slice unchanged.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Identify slices of the full representation with `as_seq_sub` views.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
//
// The proof proceeds in two steps: (1) build the extended level vector `ihv`
// (possibly reallocated, not yet linked into `hs`), then (2) `RV.assign` it
// into `hs[lv]`. Each step re-establishes disjointness, `rv_inv`,
// `mt_safe_elts` for the tail levels, and the sequence-level correctness.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic fact for the even-`j` insertion branch: when `j` is even,
// inserting one element does not change the parent-level index (j/2).
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Four-way reassociation of `loc_union`: (a ∪ b) ∪ (c ∪ d) = (a ∪ c) ∪ (b ∪ d).
// Proved by chaining pairwise associativity/commutativity steps.
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
         loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Location algebra for `insert_`'s recursion: the union of
//   (footprint modified at level `lv`) ∪ (footprint modified at levels > lv)
// collapses to the footprint of all levels from `lv` up, in each case
// together with the accumulator's location `aloc`.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Unfold both recursive footprints by one level.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
          loc_union (V.loc_vector_within hs lv (lv + 1ul))
                    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
          loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: `modifies l1` implies `modifies ((l1 ∪ l2) ∪ l3)`.
// Used in the even-`j` branch of `insert_`, where only l1 was touched.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// After a snoc, the element at the old last position is the original's last.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The full rvector invariant implies the elements-in-region property for any
// sub-range [i, j); discharged automatically by the SMT solver.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION               AFTER INSERTION
// lv
// 0             h0   h1   h2          ====>    h0   h1   h2   h3
// 1             h01                            h01  h23
// 2                                            h03
//
// The implementation appends `acc` at level `lv` (via `hash_vv_insert_copy`);
// if `j` is odd the new element completes a pair, so the pair is compressed
// into `acc` and insertion recurses one level up with halved indices.
// The numbered comment sections mirror the HACL* proof style: each heap
// snapshot `hh0..hh4` anchors a batch of framing/correctness lemmas.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the global
// index (offset + j + 1) still fits in 64 bits.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt)); // helps the solver unify `hsz` with the dereferenced field
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let dmt = B.get h0 mt 0 in
      mt_safe h0 mt /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (B.frameOf mt) (B.frameOf v) /\
      MT?.hash_size dmt = Ghost.reveal hsz /\
      mt_insert_pre_nst dmt v))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf v)))
               h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
      mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Do the actual work: push `v` through the levels starting at 0.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `insert_` only touched `hs` and `v`'s region; show that `rhs` and the
  // cached root are untouched.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached right-hand hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the struct cell disturbs nothing in the sub-regions.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// `mt_create_custom` additionally takes the (erased) hash specification and
// its concrete implementation, allowing arbitrary hash sizes/functions.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
  (requires (fun h0 ->
    Rgl?.r_inv (hreg hsz) h0 init /\
    HH.disjoint r (B.frameOf init)))
  (ensures (fun h0 mt h1 ->
    // memory safety
    modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = hsz /\
    mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  // Empty tree, then insert the mandatory first element.
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size together with a vector of (borrowed) hash pointers.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in a path pointer in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  // The path pointer and its vector are live, freeable, and eternal-region.
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  // Every stored hash is valid and lives inside the tree's region `mtr`;
  // the path's own region extends the pointer's frame and is disjoint
  // from the tree, so modifying the path cannot disturb the tree.
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lift of a sub-range [i, j) of a hash-pointer sequence to a
// high-level path (a sequence of hash values), right-to-left.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts the whole stored vector via `lift_path_` over [0, length).
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Index/representation coherence: the k-th element of the lifted sub-range
// is the representation of the k-th stored pointer. SMT-pattern on indexing
// so the fact is available automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: reading index `i` of a safe
// path's vector corresponds to index `i` of the lifted path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                   i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Extensionality: two pointer sequences that agree (as slices) on [i, j)
// lift to equal high-level paths on that range. Proved by rephrasing the
// pointwise `lift_path_index_` facts until the solver can connect them.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                   S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                   V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                   V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Framing for a range of path hashes: a modification disjoint from the tree
// region `mtr` preserves validity and region-inclusion of each stored hash.
// Recurses from `j` down to `i`, peeling one element per step.
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (// The element's region is inside `mtr`, hence disjoint from `dl`.
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul)) | [] | MerkleTree.Low.path_safe_init_preserved | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.path_p ->
dl: LowStar.Monotonic.Buffer.loc ->
h0: FStar.Monotonic.HyperStack.mem ->
h1: FStar.Monotonic.HyperStack.mem
-> FStar.Pervasives.Lemma
(requires
MerkleTree.Low.path_safe h0 mtr p /\
LowStar.Vector.size_of (MerkleTree.Low.phashes h0 p) = 0ul /\
LowStar.Monotonic.Buffer.loc_disjoint dl (MerkleTree.Low.path_loc p) /\
LowStar.Monotonic.Buffer.modifies dl h0 h1)
(ensures
MerkleTree.Low.path_safe h1 mtr p /\
LowStar.Vector.size_of (MerkleTree.Low.phashes h1 p) = 0ul) | {
"end_col": 66,
"end_line": 1216,
"start_col": 2,
"start_line": 1215
} |
FStar.Pervasives.Lemma | val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul) | val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_spec #_ h lv hs i j = | false | null | true | if lv = merkle_tree_size_lg then () else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"Prims.bool",
"MerkleTree.Low.mt_safe_elts_spec",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Slash",
"Prims.unit"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 2,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv)) | [
"recursion"
] | MerkleTree.Low.mt_safe_elts_spec | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{j >= i}
-> FStar.Pervasives.Lemma
(requires LowStar.RVector.rv_inv h hs /\ MerkleTree.Low.mt_safe_elts h lv hs i j)
(ensures
MerkleTree.New.High.hs_wf_elts (FStar.UInt32.v lv)
(LowStar.RVector.as_seq h hs)
(FStar.UInt32.v i)
(FStar.UInt32.v j))
(decreases 32 - FStar.UInt32.v lv) | {
"end_col": 60,
"end_line": 273,
"start_col": 2,
"start_line": 272
} |
FStar.HyperStack.ST.ST | val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt) | val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = | true | null | false | mt_flush_pre_nst !*(CB.cast mt) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.mt_flush_pre_nst",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility: the two hash vectors, the cached root, and the
  // valid element-count structure must all be in a good state.
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: each component lives in its own sub-region of the tree's
  // region, and the three sub-regions are pairwise disjoint.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for `mt_safe`: modifications disjoint from `mt_loc mt`
// preserve the tree invariant and the tree record itself. The proof shows
// each component's footprint is included in `mt_loc mt` and then applies the
// per-component preservation lemmas.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// Relates the low-level element-count invariant to the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted sequence of
// sequences. Proven by induction over the remaining levels.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lifting of a low-level `merkle_tree` record to the high-level
// specification tree `MTH.merkle_tree`, converting machine integers to nats
// and rvectors to pure sequences. `mt_safe_elts_spec` supplies the
// well-formedness refinement on the result.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Lift a tree *pointer* to the high-level spec by dereferencing it in `h`
// and delegating to `merkle_tree_lift`; `mt_safe` supplies its precondition.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing lemma for the *lifted* tree: a modification disjoint from the
// tree's footprint leaves the high-level representation unchanged. Proven by
// eliminating `modifies` on each component and appealing to sequence
// preservation on the two rvectors.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
// Allocates the three components (level vectors `hs`, cached right hashes
// `rhs`, and the root hash `mroot`) in fresh sub-regions of `r`, then the
// tree record itself in `r`. After each allocation the preservation lemmas
// re-establish the invariants of the previously allocated components.
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees the tree's components (level vectors, cached right hashes, root
// hash) and finally the tree record itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence lemma: updating index `i` of the lifted rvector equals splitting
// it into the prefix [0, i), the new element, and the suffix [i+1, len).
// Used by `hash_vv_insert_copy` to relate an in-place assignment to the
// high-level `append`/`cons` decomposition.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
// The implementation proceeds in two heap steps (hh0 -> hh1 -> hh2): first a
// copy-insertion into a fresh vector `ihv`, then assigning `ihv` back into
// `hs[lv]`. Each step re-establishes disjointness, preservation of untouched
// levels, `mt_safe_elts` for the tail, and the spec-level correctness.
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper: for an even `j`, inserting one element does not change
// the parent-level index (`j / 2 == (j + 1) / 2`). Discharged by SMT.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper: for an odd `j`, the parent index bumps by one
// (`(j + 1) / 2 == j / 2 + 1`), it stays within the parent level's bound,
// and level `lv` is nonempty. Discharged by SMT.
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a 4-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d),
// via repeated associativity (commutativity is built into `loc_union`'s
// extensional equality here).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Collapses the `modifies` footprint of one recursive `insert_` step: the
// union of level `lv`'s footprint, the tail levels' footprint, and `aloc`
// (the accumulator's regions) equals the whole-range footprint from `lv`.
// Used to fold the recursion's postcondition into the stated one.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the whole-range footprints into head + tail, then re-associate.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: `modifies l1` can always be restated as modifying the larger
// footprint `(l1 ∪ l2) ∪ l3`. Used in `insert_`'s base case.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// In `S.snoc s v`, index `length s - 1` is still the last element of the
// original `s`. Discharged by SMT.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// `rv_inv` implies the regionality condition `rv_elems_reg` for any
// sub-range of the vector. Discharged by SMT.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0               h0 h1 h2            ====>    h0 h1 h2 h3
// 1               h01                          h01 h23
// 2                                            h03
//
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
// Implementation: always push `acc` onto level `lv` first. If `j` is odd,
// level `lv` becomes full at this slot, so compress the previous element
// with `acc` via `hash_fun` and recurse on the parent level; if even, stop.
// The heap snapshots hh0..hh4 track each phase's framing proof.
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the 64-bit
// offset plus the incremented index does not overflow.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
// The implementation calls the recursive worker `insert_` from level 0, then
// writes back an updated tree record with `j` incremented and `rhs_ok` reset
// (the cached right hashes are stale after an insertion). The framing calls
// after each heap snapshot re-establish `mt_safe`.
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// This custom variant takes the hash size, the (erased) hash specification,
// and the concrete hash function as parameters; it allocates an empty tree
// and inserts `init` as the first leaf.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.

// A path is a vector of hash pointers tagged with its hash size; `path_p`
// is a heap pointer to it, and `const_path_p` its read-only variant.
noeq type path =
  | Path: hash_size:hash_size_t ->
          hashes:V.vector (hash #hash_size) ->
          path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its hash
// vector are live and freeable, every hash in it is valid and lives inside
// the tree's region `mtr`, and the path's own region is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// The footprint of a path: all regions under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lifting of the sub-sequence [i, j) of a sequence of hash pointers to
// a high-level path, reading each hash's pure representation in `h`.
// Structured as a right fold (snoc) decreasing on `j`.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole hash vector of a safe path.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: index `k - i` of the lifted
// path is the representation of `hs[k]`. Registered as an SMT pattern.
// Proof by induction on `j`, peeling the last element.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Specialization of `lift_path_index_` to a whole path: element `i` of the
// lifted path is the representation of the `i`-th hash in the path's vector.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                  Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// If two hash-pointer sequences agree (as pointers) on the slice [i, j),
// their lifted paths on that range are equal. Proven by exposing the
// pointwise characterization of `lift_path_` on both sides and matching
// indices through the slices.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Framing lemma (element level): if a modification `dl` is disjoint from
// everything under the tree region `mtr`, then each hash in the range
// [i, j) — whose region is included in `mtr` — keeps its regional
// invariant and region inclusion across the modification.
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (// The last element's region is under `mtr`, hence disjoint from `dl`;
        // `Rgl?.r_sep` then frames its invariant.
        assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
        Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
        path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Framing lemma (path level): `path_safe` is preserved by any modification
// disjoint from both the path's own footprint and the tree region `mtr`.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  // `path_loc p` covers the buffer and its vector, so both survive `dl`.
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Framing lemma for an empty path: when the path holds no hashes, disjointness
// from `mtr` is not needed — only disjointness from the path's own footprint —
// and the path stays empty.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Representation-preservation lemma (element level): under the same framing
// conditions as `path_safe_preserved_`, the lifted representation of the
// range [i, j) is unchanged — i.e. the hash values, not just the invariants,
// survive the modification.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                              HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
        path_preserved_ mtr hs i (j - 1) dl h0 h1;
        // Frame the last element's representation via `r_sep`.
        assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
        Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Representation-preservation lemma (path level): a modification disjoint
// from the path footprint and the tree region leaves both the path's hash
// size and its lifted representation unchanged.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in region `r` (which must be disjoint from
// the tree region `mtr`). The hash vector is allocated in a new sub-region
// of `r`, so `V.frameOf (phashes p)` extends `B.frameOf p` as `path_safe`
// requires. The resulting lifted path is the empty sequence.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to be empty. Note this only clears the vector's logical
// size (`V.clear`); the hashes it pointed to are owned by the tree and are
// not freed here.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees a path: the vector's backing storage and the path buffer itself.
// The hashes referenced by the vector are NOT freed — they belong to the
// Merkle tree (the path only stores pointers into it).
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
// Constructs the "rightmost hashes" `rhs` for an (incomplete) Merkle tree
// with leaves in [i, j) at level `lv`, folding the Merkle root into `acc`.
// `actd` tracks whether `acc` already holds an accumulated hash at this
// level. The postcondition ties the result to the high-level specification
// `MTH.construct_rhs`, so the low-level computation is provably equal to
// the spec. Recurses level-by-level on halved indices; terminates since
// `U32.v j` strictly decreases.
private
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
      HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
      mt_safe_elts #hsz h0 lv hs i j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.loc_rvector rhs)
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 rhs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs i j;
      MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
        (U32.v lv)
        (Rgl?.r_repr (hvvreg hsz) h0 hs)
        (Rgl?.r_repr (hvreg hsz) h0 rhs)
        (U32.v i) (U32.v j)
        (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
      (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
      )))
    (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  if j = 0ul then begin
    // Base case: nothing to construct; both rhs and acc are unchanged.
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    (if j % 2ul = 0ul
    then begin
      // Even case: no new rightmost hash at this level; recurse directly.
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    else begin
      // Odd case: the dangling last hash at this level either (a) is
      // combined with the active accumulator (actd), saving the old acc
      // into rhs.[lv], or (b) simply becomes the new accumulator.
      if actd
      then begin
        // (a) Save acc into rhs.[lv], then acc := hash(last, acc).
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                        (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                               (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // (b) acc := last hash at this level; rhs is untouched.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      // Summarize the state after either branch, then recurse with actd=true.
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                      (if actd
                      then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                 (Rgl?.r_repr (hreg hsz) hh0 acc)
                      else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
// Non-stateful precondition check for `mt_get_root`. Currently trivially
// true (no runtime validation needed), kept for interface uniformity with
// the other `_pre_nst` checks.
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper around `mt_get_root_pre_nst`: dereferences the tree and
// runs the (trivial) runtime check. Exposed so callers can validate inputs
// before calling `mt_get_root`.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Computes the Merkle root into `rt`. If the cached rightmost hashes are
// up to date (`rhs_ok`), the stored root is simply copied out; otherwise
// `construct_rhs` rebuilds them and the root, and the tree is updated with
// `rhs_ok = true`. The postcondition matches the high-level spec
// `MTH.mt_get_root` on lifted representations.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      mt_get_root_pre_nst dmt rt /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf rt)))
               h0 h1 /\
      mt_safe h1 mt /\
      (let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
      MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
      MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
      MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
      MT?.offset mtv1 == MT?.offset mtv0 /\
      MT?.rhs_ok mtv1 = true /\
      Rgl?.r_inv (hreg hsz) h1 rt /\
      // correctness
      MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
      (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: the cached root is valid — copy it out and frame everything.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild rhs and the root from the hashes.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root inside the tree.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark the cached rhs/root valid.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
// Appends a hash pointer `hp` (which must live under the tree region `mtr`)
// to the path. Only the pointer is inserted — no hash is copied — so the
// path stays valid only while the tree is unmodified. The lifted path after
// insertion equals `MTH.path_insert` applied to the lifted path before.
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
        let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
        V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
        hsz = hsz0 /\ hsz = hsz1 /\
        (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
        S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // `V.insert` may reallocate the vector's storage; frame the tree-resident
  // hashes against that modification.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Store the (possibly new) vector back into the path record.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
// Number of path hashes contributed by a single tree level (0 or 1) for
// target index `k` with `j` elements at that level. Zero when the level is
// empty, when `k` is the (unpaired) last element, or when the right sibling
// does not exist and no accumulator is active. Refinement ties the result
// to the spec `MTH.mt_path_length_step`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
// Total Merkle path length for target `k` among `j` elements, starting at
// level `lv`: the sum of per-level steps as `k` and `j` are halved up the
// tree. The refinement bounds the result by the number of remaining levels
// (32 - lv) and equates it with the spec `MTH.mt_path_length`.
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (// An odd `j` leaves a dangling hash, activating the accumulator above.
        let nactd = actd || (j % 2ul = 1ul) in
        mt_path_length_step k j actd +
        mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the number of hashes currently stored in a path (a read-only
// accessor over a const path pointer).
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
// Inserts at most one hash into the path for a single tree level `lv`:
// the sibling of index `k` among [i, j). If `k` is odd, the left sibling
// from `hs` is inserted; if `k` is even, the right sibling is taken from
// `hs`, or from `rhs` when `k + 1 = j` and an accumulator is active, or
// nothing is inserted when `k = j` (or no sibling exists). Matches the
// spec `MTH.mt_make_path_step` on lifted paths.
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
      V.size_of (phashes h1 p) <= lv + 2ul /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_make_path_step
           (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // Odd k: insert the left sibling hs.[lv].[k-1-ofs].
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // Even k: sibling is on the right (from hs or rhs), or absent.
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
// Non-stateful precondition of `mt_get_path_step`: index `i` must be within
// the path's current number of hashes.
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper around `mt_get_path_step_pre_nst`: dereferences the
// const path pointer and performs the bounds check.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
      Path?.hash_size pv = Ghost.reveal hsz /\
      live h0 (Path?.hashes pv) /\
      mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the i-th hash pointer stored in the path. The caller receives a
// pointer into the tree's own storage (the path stores pointers, not
// copies), so it remains valid only while the tree is unmodified.
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
      Path?.hash_size pv = Ghost.reveal hsz /\
      live h0 (Path?.hashes pv) /\
      i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursive core of path construction: walks the tree from level `lv`
// upward, inserting one sibling per level via `mt_make_path_step` and
// recursing on halved indices until the level is empty. The lifted result
// equals the spec `MTH.mt_get_path_`. Terminates since `32 - lv` decreases.
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) ==
      V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
    (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                    (MTH.mt_make_path_step
                      (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd));
    // Frame the tree's invariants across the path modification.
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    // Recurse one level up; an odd `j` activates the accumulator flag.
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv + 1)
                      (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                      (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                      (lift_path hh1 mtr p)
                      (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv)
                      (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd)))
#pop-options
// Non-stateful precondition of `mt_get_path`: the requested offset must be
// reachable from the tree's base offset, the path's hash size must match
// the tree's, the (absolute) index must be a current leaf (in [i, j)), and
// the output path must start empty.
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper around `mt_get_path_pre_nst`: dereferences tree and
// path, then runs the runtime validity checks.
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
// Location-algebra helper: unioning `l2` into `loc_union l1 l2` is a
// no-op (idempotence/absorption). Used to normalize the modifies clause
// in `mt_get_path`.
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
// Builds the full Merkle path for leaf offset `idx` into `p`, also writing
// the current root into `root` (refreshing `rhs` via `mt_get_root` first).
// The path consists of the leaf's own hash followed by one sibling per
// level (see `mt_get_path_`). Returns the tree's element count `j`, needed
// by the verifier. Note: the path stores pointers into the tree's storage.
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      let idx = split_offset (MT?.offset mtv0) idx in
      MT?.hash_size mtv0 = Ghost.reveal hsz /\
      MT?.hash_size mtv1 = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
      // memory safety
      modifies (loc_union
                 (loc_union
                   (mt_loc mt)
                   (B.loc_all_regions_from false (B.frameOf root)))
                 (path_loc p))
               h0 h1 /\
      mt_safe h1 mt /\
      path_safe h1 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h1 root /\
      V.size_of (phashes h1 p) ==
      1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
      // correctness
      (let sj, sp, srt =
        MTH.mt_get_path
          (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
      sj == U32.v (MT?.j mtv1) /\
      S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
      srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // Refresh rhs and compute the root first (path is untouched by this).
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  // The path starts with the leaf's own hash.
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                  (MTH.path_insert
                    (lift_path hh1 mtframe p)
                    (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Then one sibling per level.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                     (loc_union
                       (mt_loc ncmt)
                       (B.loc_all_regions_from false (B.frameOf root)))
                     (path_loc p))
                   hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                  (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                    (U32.v i) (U32.v j) (U32.v idx)
                    (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_flush_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.const_mt_p -> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 53,
"end_line": 2500,
"start_col": 22,
"start_line": 2500
} |
Prims.GTot | val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1)))) | val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j = | false | null | false | if i = j
then S.empty
else (S.snoc (lift_path_ h hs i (j - 1)) (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1)))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"sometrivial",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.l_and",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"LowStar.Vector.forall_seq",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.Datastructures.hreg",
"Prims.op_Equality",
"FStar.Seq.Base.empty",
"MerkleTree.New.High.hash",
"FStar.UInt32.v",
"Prims.bool",
"FStar.Seq.Properties.snoc",
"MerkleTree.Low.lift_path_",
"FStar.Integers.op_Subtraction",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"FStar.Seq.Base.index",
"MerkleTree.New.High.path",
"Prims.int"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// `create_empty_mt` allocates a fresh, empty Merkle tree in (sub-regions of)
// region `r`: one region for the level hashes `hs`, one for the right-most
// hashes `rhs`, and one for the cached root `mroot`. The resulting tree has
// offset 0 and no elements; correctness relates it to `MTH.create_empty_mt`.
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
    (requires (fun _ -> true))
    (ensures (fun h0 mt h1 ->
      let dmt = B.get h1 mt 0 in
      // memory safety
      B.frameOf mt = r /\
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      mt_not_full h1 mt /\
      // correctness
      MT?.hash_size dmt = hash_size /\
      MT?.offset dmt = 0UL /\
      merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  // Regional instances for single hashes, hash vectors, and vectors thereof.
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Allocate the per-level hash vectors (`hs`) in their own sub-region.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Allocate the right-most hashes (`rhs`) in another sub-region.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` does not disturb `hs`.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Allocate the cached Merkle root in a third sub-region.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Finally allocate the tree struct itself; `rhs_ok = false` since no root
  // has been computed yet.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)
// `mt_free` deallocates a Merkle tree: the two regional vectors (`hs`,
// `rhs`), the cached root, and finally the tree pointer itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
private
// `as_seq_sub_upd`: updating index `i` of a regional vector's sequence view
// with `v` is the same as re-assembling the view from the prefix [0, i), the
// new element `v`, and the suffix [i+1, size). Used when reasoning about
// `RV.assign` in terms of sequence operations.
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // The update leaves the prefix and the suffix slices untouched.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Relate slices of the full view to `as_seq_sub` on the same ranges.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  // The updated position holds exactly `v`.
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
// `hash_vv_insert_copy` appends a copy of hash `v` to the level-`lv` vector
// of `hs`. It proceeds in two steps: (1) build the extended vector with
// `RV.insert_copy` (disconnected from `hs`), then (2) `RV.assign` it back
// into `hs` at index `lv`. The postcondition pins down the precise modified
// footprint, the new size of level `lv`, preservation of the higher levels,
// and the sequence-level correctness w.r.t. `MTH.hashess_insert`.
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  // All level vectors in `hs` live in pairwise-disjoint regions; these
  // instantiations split the forall2 into the left/right halves around `lv`.
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  // Levels below `lv` are untouched by the copy-insertion...
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  // ...and so are levels above `lv`.
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));

  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  // Rebuild the new sequence view as prefix ++ [new level] ++ suffix, which
  // matches `S.upd` via the `as_seq_sub_upd` lemma above.
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Even case for index arithmetic during insertion: when `j` is even,
// appending one element does not change the parent-level index `j / 2`.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Odd case for index arithmetic during insertion: when `j` is odd, the
// parent level gains one element ((j + 1) / 2 = j / 2 + 1), the parent index
// stays within bounds for level `lv + 1`, and level `lv` is non-empty
// (j - offset_of i > 0), so a left sibling exists to hash with.
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Four-way reshuffling of `loc_union`: (a ∪ b) ∪ (c ∪ d) = (a ∪ c) ∪ (b ∪ d).
// Proved by chaining the binary associativity (and implicit commutativity)
// lemmas; used below to normalize modified footprints.
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Footprint algebra for the recursive case of `insert_`: the union of
// (what this level modifies) and (what the recursive call on `lv + 1`
// modifies), each joined with the accumulator footprint `aloc`, equals the
// single footprint stated in `insert_`'s postcondition for level `lv`.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both the vector-slot footprint and the element footprint at `lv`.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  // Merge the duplicated `aloc` (aloc ∪ aloc collapses).
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  // Regroup element-locs with element-locs and slot-locs with slot-locs.
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening for `modifies`: anything that modifies `l1` also modifies the
// larger footprint `(l1 ∪ l2) ∪ l3`. Used in the even (non-recursive) branch
// of `insert_` to match the recursive branch's footprint.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// After a snoc, the element at the original last position is the original
// sequence's last element.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The full regional-vector invariant implies the element-region invariant
// on any sub-range [i, j).
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Specification of the recursive insertion worker. `insert_` appends the
// accumulator `acc` at level `lv` and, when the level index `j` is odd,
// compresses the last two hashes and recurses at level `lv + 1`. The
// footprint covers levels [lv, size) of `hs`, their vector slots, and the
// accumulator's region; correctness is stated against `MTH.insert_`.
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Implementation of the recursive insertion worker (spec in the `val` above).
// The proof is structured in numbered steps mirroring the modifies-footprint
// decomposition: copy-insert at level `lv`, then (odd case) hash and recurse.
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (// Odd case: two siblings exist at level `lv`; compress them into
       // `acc` and recurse one level up.
       insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       // Hashing only touched `acc`'s region, disjoint from `hs`.
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       // The recursive call touched only levels above `lv`, so the head
       // vector at `lv` kept its (already extended) size.
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (// Even case: no sibling to compress; the copy-insertion alone
       // establishes the postcondition once the footprint is weakened.
       insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  // Normalize the footprint to the one stated in the spec.
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Pure precondition check for insertion: the tree is not full and the
// virtual index `offset + (j + 1)` still fits in 64 bits.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)

// Stateful wrapper over `mt_insert_pre_nst` for const tree pointers.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Run the recursive worker starting at level 0; `v` is consumed as the
  // accumulator.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` live in regions disjoint from the worker's footprint,
  // so their invariants and representations survive the insertion.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached right-most hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree struct only touches `mt` itself; every component's
  // invariant is preserved.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
// Composition of `create_empty_mt` and `mt_insert`: allocate an empty tree
// in `r`, then insert the initial hash (which consumes `init` as the
// insertion accumulator).
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A (Merkle) path: a vector of hashes of a fixed size, tagged with that
// hash size. `path_p` is a mutable pointer to a path; `const_path_p` is its
// read-only counterpart used in const-correct APIs.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
// Memory-safety invariant for a path. Since a path's hash pointers come from
// a Merkle tree (whose region is `mtr`), the invariant requires each hash to
// be valid and to live inside `mtr`, while the path's own vector lives in a
// region extending the path pointer's frame, disjoint from `mtr`.
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// Abstract footprint of a path: every region under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lifting of a range [i, j) of low-level path hashes to a high-level
// `MTH.path`, assuming each hash in the range satisfies its invariant.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j) | [
"recursion"
] | MerkleTree.Low.lift_path_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j:
FStar.Integers.nat
{ i <= j /\ j <= FStar.Seq.Base.length hs /\
LowStar.Vector.forall_seq hs
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp) }
-> Prims.GTot (hp: MerkleTree.New.High.path{FStar.Seq.Base.length hp = j - i}) | {
"end_col": 63,
"end_line": 1094,
"start_col": 2,
"start_line": 1092
} |
FStar.Pervasives.Lemma | val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i)) | val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j = | false | null | true | assert (forall (k: nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) == Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k: nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) == Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k: nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k == Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k: nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k == Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k: nat{k < j - i}). S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k: nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i)) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma"
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims._assert",
"Prims.l_Forall",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Integers.op_Less",
"Prims.eq2",
"FStar.Seq.Base.index",
"FStar.Seq.Base.slice",
"FStar.Integers.op_Subtraction",
"Prims.unit",
"Spec.Hash.Definitions.bytes",
"Prims.l_or",
"Prims.op_Equality",
"Prims.int",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"FStar.Seq.Base.length",
"Lib.IntTypes.uint8",
"FStar.UInt32.v",
"MerkleTree.New.High.hash",
"MerkleTree.Low.lift_path_",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Integers.op_Plus"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j))) | [] | MerkleTree.Low.lift_path_eq | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
hs1: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
hs2: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat
-> FStar.Pervasives.Lemma
(requires
i <= j /\ j <= FStar.Seq.Base.length hs1 /\ j <= FStar.Seq.Base.length hs2 /\
FStar.Seq.Base.equal (FStar.Seq.Base.slice hs1 i j) (FStar.Seq.Base.slice hs2 i j) /\
LowStar.Vector.forall_seq hs1
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp) /\
LowStar.Vector.forall_seq hs2
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp))
(ensures
FStar.Seq.Base.equal (MerkleTree.Low.lift_path_ h hs1 i j)
(MerkleTree.Low.lift_path_ h hs2 i j)) | {
"end_col": 82,
"end_line": 1161,
"start_col": 2,
"start_line": 1146
} |
FStar.HyperStack.ST.ST | val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ())) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt | val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r = | true | null | false | [@@ inline_let ]let hrg = hreg hsz in
[@@ inline_let ]let hvrg = hvreg hsz in
[@@ inline_let ]let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0uL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Ghost.erased",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"FStar.HyperStack.ST.erid",
"Prims.unit",
"MerkleTree.Low.mt_safe_elts_preserved",
"FStar.UInt32.__uint_to_t",
"LowStar.Monotonic.Buffer.loc_none",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hash",
"LowStar.RVector.as_seq_preserved",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.mt_p",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.mbuffer",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.l_and",
"Prims.eq2",
"Prims.nat",
"LowStar.Monotonic.Buffer.length",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Prims.b2t",
"Prims.op_Negation",
"LowStar.Monotonic.Buffer.g_is_null",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Monotonic.Buffer.freeable",
"LowStar.Buffer.malloc",
"MerkleTree.Low.MT",
"FStar.UInt64.__uint_to_t",
"LowStar.Regional.rg_alloc",
"FStar.HyperStack.ST.new_region",
"LowStar.Vector.loc_vector",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.size_of",
"LowStar.RVector.rv_inv_preserved",
"Prims._assert",
"FStar.Seq.Base.seq",
"LowStar.Regional.__proj__Rgl__item__repr",
"LowStar.RVector.as_seq",
"FStar.Seq.Base.create",
"MerkleTree.New.High.hash_init",
"LowStar.RVector.rvector",
"LowStar.RVector.alloc_rid",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.mt_safe_elts_init",
"LowStar.Regional.regional",
"MerkleTree.Low.Datastructures.hash_vv",
"MerkleTree.Low.Datastructures.hvvreg",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.Datastructures.hreg"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ())) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ())) | [] | MerkleTree.Low.create_empty_mt | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
hash_size: MerkleTree.Low.Datastructures.hash_size_t ->
hash_spec: FStar.Ghost.erased MerkleTree.Spec.hash_fun_t ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t ->
r: FStar.HyperStack.ST.erid
-> FStar.HyperStack.ST.ST MerkleTree.Low.mt_p | {
"end_col": 4,
"end_line": 376,
"start_col": 2,
"start_line": 349
} |
FStar.HyperStack.ST.ST | val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt | val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
let mt_create_custom hsz hash_spec r init hash_fun = | true | null | false | let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Ghost.erased",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"FStar.HyperStack.ST.erid",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"MerkleTree.Low.mt_p",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"Prims.unit",
"MerkleTree.Low.mt_insert",
"FStar.Ghost.hide",
"MerkleTree.Low.create_empty_mt"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 40,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init))) | [] | MerkleTree.Low.mt_create_custom | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
hsz: MerkleTree.Low.Datastructures.hash_size_t ->
hash_spec: FStar.Ghost.erased MerkleTree.Spec.hash_fun_t ->
r: FStar.HyperStack.ST.erid ->
init: MerkleTree.Low.Datastructures.hash ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t
-> FStar.HyperStack.ST.ST MerkleTree.Low.mt_p | {
"end_col": 4,
"end_line": 1043,
"start_col": 52,
"start_line": 1038
} |
FStar.HyperStack.ST.ST | val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_retract_to_pre mt r =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r | val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_retract_to_pre mt r = | true | null | false | let mt = CB.cast mt in
let h0 = HST.get () in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.mt_retract_to_pre_nst",
"Prims.bool",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Allocates an empty Merkle tree (no leaves yet): fresh regions for the
// level hashes `hs`, the rightmost hashes `rhs`, and the root `mroot`,
// all under the caller-supplied eternal region `r`.
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` (in a disjoint region) does not disturb `hs`.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  // Region creation/allocation modifies nothing observable (`loc_none`).
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees every component of the tree (level hashes, rightmost hashes, root)
// and then the tree pointer itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
private
// Sequence lemma: updating index `i` of the lifted vector `rv` with `v` is
// the same as splicing together the prefix [0, i), the new element `v`, and
// the suffix [i+1, size). Used to reason about `RV.assign` below.
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // The update leaves both the prefix and the suffix slices unchanged.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Relate slices of the lifted sequence to `as_seq_sub` of the vector.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic helper: when the number of elements `j` at a level is even,
// inserting one more element does not change the parent-level count
// (`j / 2 == (j + 1) / 2`), so the recursion in `insert_` can stop.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic helper: when `j` is odd, inserting one more element bumps the
// parent-level count by one (`(j + 1) / 2 == j / 2 + 1`), the parent index
// stays in bounds, and the current level is non-empty — so `insert_` must
// recurse with a freshly compressed hash.
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Rearranges a 4-way location union: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Used to normalize `modifies` footprints during insertion proofs.
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Footprint helper for `insert_`'s recursive case: the union of
// (level-`lv` footprint ∪ `aloc`) with (levels-above-`lv` footprint ∪ `aloc`)
// collapses to the footprint of all levels from `lv` up, union `aloc`.
// `aloc` is the accumulator's region footprint in practice.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Both the vector-cell footprint and the element footprint decompose into
  // the level-`lv` part plus the rest.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening lemma for `modifies`: a modification within `l1` is also a
// modification within the larger footprint `(l1 ∪ l2) ∪ l3`. Used in the
// base (even) case of `insert_` to match the recursive postcondition shape.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// Sequence helper: after appending `v`, the element at the old last index
// is still the original last element of `s`.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The full rvector invariant implies the per-slice region invariant for any
// sub-range [i, j) of its elements.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION         AFTER INSERTION
// lv
// 0             h0   h1   h2     ====>   h0   h1   h2   h3
// 1             h01                      h01  h23
// 2                                      h03
//
// If the current level holds an even number of elements, the new element is
// simply appended there (base case). If it holds an odd number, the last two
// hashes are compressed into `acc` with `hash_fun` and the recursion pushes
// `acc` to the next level. The refinement `U32.v j < pow2 (32 - U32.v lv) - 1`
// bounds the recursion depth at 32 levels.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Pure precondition for insertion: the tree is not full and the 64-bit
// global index (`offset + j + 1`) does not overflow.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)

// Stateful wrapper over `mt_insert_pre_nst` for a const tree pointer.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion. After `insert_` updates the level hashes, the tree record is
// rewritten with `j + 1` and `rhs_ok = false` (the cached rightmost hashes
// are invalidated by every insertion).
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  // `insert_` only touches the level hashes and `v`'s region, so `rhs` and
  // `mroot` are untouched.
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree pointer itself does not disturb any component.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// `mt_create_custom` additionally takes the hash size and the (erased)
// hash-function specification; it is `create_empty_mt` followed by one
// `mt_insert` of `init`.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A (Merkle) path is a vector of hashes together with its hash size; the
// hash buffers it contains point into the target tree's regions.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the pointer and its vector are
// live/freeable, every hash in the path lives inside the tree region `mtr`,
// the vector's region extends the path pointer's region, and the path's
// region is disjoint from the tree's.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))

// The footprint of a path: everything below the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence [i, j) of low-level hash buffers to a high-level
// path (a sequence of hash values), recursing from the right end.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lifts the whole hash vector of `p` to the
// high-level specification path.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Indexing a lifted sub-path at `k - i` gives the representation of the
// `k`-th low-level hash. Registered as an SMT pattern for automation.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Corollary of `lift_path_index_` for full paths: the `i`-th hash of the
// path vector lifts to the `i`-th element of the lifted path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// If two hash sequences agree (pointer-wise) on [i, j), their lifted paths
// on [i, j) are equal. The proof enumerates the pointwise equalities so the
// SMT solver can conclude sequence equality.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Frame lemma (sequence form): modifications disjoint from the tree region
// `mtr` preserve well-formedness of every path hash in [i, j), since each
// hash lives inside `mtr`. Proof recurses from the right end.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma: `path_safe` survives any modification disjoint from both the
// path footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: with no hashes to frame against `mtr`,
// disjointness from the path footprint alone preserves `path_safe` and
// emptiness.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Frame lemma (sequence form): under the same disjointness conditions as
// `path_safe_preserved_`, the lifted representation of [i, j) is unchanged.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Frame lemma: disjoint modifications preserve both the hash size and the
// full lifted representation of the path.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocate a fresh, empty path in region `r` (the hash vector lives in a new
// sub-region of `r`). `r` must be disjoint from the tree region `mtr` so that
// later tree modifications cannot invalidate the path.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to the empty path. Note this only resets the vector's logical
// size (via V.clear); the hash pointers previously stored are not freed here.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocate a path: free its hash vector, then the path buffer itself.
// The stored hashes are pointers into the tree and are not freed here.
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path

// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
//
// Recursion over levels: `lv` is the current level, `i`/`j` the live index
// range at that level, `acc` the running accumulator (becomes the root),
// and `actd` records whether `acc` currently holds a meaningful value.
// The postcondition ties the result to the high-level spec
// MTH.construct_rhs, so correctness is "this equals the spec".
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
// Base case: empty level; nothing changes.
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
// Even case: no dangling rightmost node at this level; recurse one
// level up with halved indices, leaving acc/actd unchanged.
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
// Odd case: the last node at this level is unpaired.
if actd
then begin
// acc already holds a value: save it as this level's rightmost
// hash, then fold the unpaired node into acc.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// acc not yet active: the unpaired node becomes the accumulator.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
// Summarize the state after the if/else, then recurse one level up
// with actd = true (acc now definitely holds a value).
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
// Runtime precondition check for mt_get_root; trivially true (kept for
// interface uniformity with the other *_pre_nst checks).
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper exposing the (trivial) mt_get_root precondition check on a
// const tree pointer.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
// Fast path: rightmost hashes (and thus the cached root) are current;
// just copy the cached root out.
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
// Slow path: rebuild rightmost hashes from level 0; rt becomes the root.
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly computed root in the tree.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Mark rhs_ok = true so subsequent calls take the fast path.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// Append a hash (a pointer into the tree region) to a path. Matches the
// high-level MTH.path_insert on the lifted values.
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// V.insert may reallocate the vector, so re-establish safety/lifting
// across both the insertion and the path-record update below.
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
// Number of path hashes contributed at one level: 0 or 1, mirroring
// MTH.mt_path_length_step (refinement type ties it to the spec).
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
// Total Merkle-path length from level `lv` upward, summing the per-level
// contributions; matches MTH.mt_path_length and is bounded by 32 - lv.
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Return the number of hashes currently stored in a (const) path.
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
// One step of path construction at level `lv`: push the sibling of index `k`
// (from `hs`, or from `rhs` when the sibling is a rightmost hash) onto the
// path. Matches MTH.mt_make_path_step on lifted values.
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
// k is a right child: its sibling is the node at k-1 (offset-adjusted).
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
// k is a left child: sibling is at k+1 if it exists, possibly a
// rightmost hash; no sibling when k is the last node.
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
// Pure precondition for mt_get_path_step: the requested index must be in
// bounds of the path's hash vector.
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
// Stateful wrapper of the bounds check on a const path pointer.
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
// Read the i-th hash (a pointer) out of a path; caller must have checked the
// bound (see mt_get_path_step_pre).
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursive core of path construction: at each level, push the sibling via
// mt_make_path_step, then recurse one level up with halved indices. The
// lifted result equals MTH.mt_get_path_.
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
// The step only modified the path; hs/rhs invariants carry over.
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
// Recurse one level up; actd flips to true when j is odd.
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
// Pure precondition for mt_get_path: the external offset must map into the
// tree's index window, hash sizes must agree, the (offset-split) index must
// lie in [i, j), and the output path must start empty.
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper of the mt_get_path precondition on const pointers.
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
// Trivial loc-algebra fact (idempotence of union on the right operand),
// used to normalize the `modifies` clause in mt_get_path.
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
//
// Steps: (1) refresh the root/rhs via mt_get_root, (2) push the leaf itself,
// (3) run mt_get_path_ to collect siblings level by level. Returns `j`, the
// element count needed by mt_verify. Correctness is equality with
// MTH.mt_get_path on lifted values.
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
// Ensure rhs/root are up to date before reading siblings from rhs.
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
// The first path element is the leaf at idx itself.
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
// Collect the siblings from level 0 upward.
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
// Loc-algebra helper for mt_flush_to_'s `modifies` clause: peeling level `lv`
// off the footprint of levels [lv, size) and re-associating the unions yields
// the footprint of levels [lv+1, size) plus level lv.
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Specification of the recursive flush: advance the low index of level `lv`
// from `pi` to `i`, discarding flushed hashes at each level; the result
// matches MTH.mt_flush_to_ on lifted values. (Implementation follows below.)
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always retain at least one base
// hash element. If there are `MT?.j` elements in the tree, then because of the
// precondition `MT?.i <= idx < MT?.j` we still have the `idx`-th element after
// flushing.
(* Pure (non-stateful) precondition of `mt_flush_to`: the external offset [idx]
   must be representable relative to the tree's base offset, and after splitting
   it into a 32-bit index it must lie in the live range [MT?.i, MT?.j). *)
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
(* Stateful wrapper over `mt_flush_to_pre_nst`: dereferences the (const)
   tree pointer and checks the flush precondition on the current state. *)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
(* [mt_flush_to mt idx] flushes the tree up to the external offset [idx]:
   it splits [idx] against the tree's base offset, runs the recursive worker
   `mt_flush_to_` from level 0, and then rewrites the tree record so that
   `MT?.i` becomes the new (split) index. The postcondition relates the result
   to the high-level [MTH.mt_flush_to]; the hash size is unchanged. *)
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
// Do the actual per-level flushing.
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
// Show that `rhs`, its contents, and the root hash are untouched by the
// flush, which only modified the `hs` element regions and vector slots.
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
// Commit the new lower index `idx` into the tree record; every other field
// is copied unchanged.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
// The record write only touches the tree pointer itself; re-establish the
// invariants of the sub-structures across that write.
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
(* Pure precondition of `mt_flush`: the tree must contain at least one
   un-flushed element (strictly more than `MT?.i`). *)
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
(* Stateful wrapper over `mt_flush_pre_nst` on a const tree pointer. *)
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
(* [mt_flush mt] flushes everything that can be flushed: it rejoins the last
   index `MT?.j - 1` with the base offset and delegates to `mt_flush_to`,
   so exactly one base-level hash range is kept (see the note above
   `mt_flush_to_pre_nst`). Spec-level behavior is [MTH.mt_flush]. *)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
// Overflow-freedom facts needed by `join_offset` below.
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
(* [mt_retract_to_ hs lv i s j] retracts (shrinks) the hash store [hs] from
   level [lv] upward: each level's vector is truncated so that only entries
   for indices in [i, s) remain (previously [i, j)), then the function recurses
   one level up with all indices halved. The ensures-clause ties the result to
   the high-level specification [MTH.mt_retract_to_]. Structurally this mirrors
   `mt_flush_to_` (shrink from the right instead of flushing from the left). *)
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
// 2) Assign the retracted vector back into `hs` at level `lv`.
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
// 3) Recurse one level up if there is one; otherwise close the proof here.
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
// Reuses the flush helper: the modifies-footprint algebra is identical.
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
(* Pure precondition of `mt_retract_to`: the external offset [r] must be
   representable relative to the tree's base offset, and after splitting it
   must lie in the live range [MT?.i, MT?.j). *)
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv)
val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_retract_to_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.const_mt_p -> r: MerkleTree.Low.offset_t -> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 29,
"end_line": 2758,
"start_col": 28,
"start_line": 2754
} |
FStar.HyperStack.ST.ST | val mt_verify_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt)))
(ensures (fun _ _ _ -> True)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_verify_pre #hsz mt k j mtr p rt =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_verify_pre_nst mtv k j !*p rt | val mt_verify_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_verify_pre #hsz mt k j mtr p rt = | true | null | false | let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_verify_pre_nst mtv k j !*p rt | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"EverCrypt.Helpers.uint64_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.mt_verify_pre_nst",
"Prims.bool",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
(* Rounds [i] down to the nearest even index: this is the index of the first
   slot actually stored at a given level for a lower bound [i]. *)
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
(* Recursive safety predicate: at each level from [lv] up to the tree height,
   the level's vector has exactly [j - offset_of i] elements, with the index
   range halving per level. Terminates because [lv] increases toward 32. *)
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
(* Full tree invariant: liveness/freeability of the tree pointer, invariants
   of `hs`, `rhs`, and the root hash, valid element access (`mt_safe_elts`),
   and the region discipline (each sub-structure lives in its own region,
   extending the tree's frame and pairwise disjoint). *)
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
(* Frame lemma: `mt_safe` survives any modification disjoint from the tree's
   footprint (`mt_loc mt` covers all regions under the tree's frame), and the
   tree record itself is unchanged. *)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
// `mt_loc` includes the tree pointer and all sub-structure footprints;
// the individual `*_preserved` / `r_sep` lemmas then frame each component.
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// Low-level element safety (`mt_safe_elts`) implies the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted sequence-of-
// sequences representation. Proven by the same level-wise recursion.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lifting of a low-level tree value to its high-level specification
// counterpart (`MTH.merkle_tree`), reading every component out of memory `h`.
// The precondition is the component part of `mt_safe` (without the
// regionality conjuncts, which the lifting does not need).
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to discharge the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Lift a tree *pointer* (rather than a tree value): dereference in `h` and
// delegate to `merkle_tree_lift`. `mt_safe h mt` supplies its precondition.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing for the *lifted* tree: a modification disjoint from `mt_loc`
// leaves the high-level representation unchanged (not just safe). Note the
// ensures clause first calls `mt_safe_preserved` so that `mt_lift h1 mt`
// is well-defined.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  // The tree record itself is untouched ...
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  // ... and so are the sequences/root the lifting reads.
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
// Allocates each component (`hs`, `rhs`, `mroot`, the tree record) in a
// fresh sub-region of `r`, which establishes the regionality/disjointness
// conjuncts of `mt_safe`. Between allocations, the preservation lemmas
// carry the already-established invariants forward.
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // `rhs` allocation does not disturb `hs` (separate region).
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees every component of the tree and then the tree record itself.
// The component regions themselves are not reclaimed here (they are
// eternal regions created via `HST.new_region`).
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence algebra helper: updating index `i` of an rvector's representation
// is the same as splicing (prefix up to `i`) ++ [v] ++ (suffix after `i`).
// Used below to relate `RV.assign` to the high-level `hashess_insert`.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slicing commutes with `upd` outside the updated index ...
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // ... and slices of `as_seq` coincide with `as_seq_sub`.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
//
// The proof has two phases mirroring the two operations:
//   1) `RV.insert_copy` builds an extended copy `ihv` of `hs[lv]` — only the
//      level-`lv` element footprint is modified;
//   2) `RV.assign` writes `ihv` back into `hs` — only the vector cell at
//      index `lv` is modified.
// Everything else (other levels, `v`, the tail invariant) is carried across
// each phase by disjointness/preservation lemmas.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic fact for the even case of `insert_`: inserting the (j+1)-th
// element at an even `j` does not change the parent-level count (j/2).
// Discharged automatically by the SMT solver.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic facts for the odd case of `insert_`: at an odd `j` the
// parent-level count grows by one ((j+1)/2 = j/2 + 1), the recursive call's
// bound still holds at level lv+1, and the current level is non-empty.
// Discharged automatically by the SMT solver.
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a four-way location union: (a∪b)∪(c∪d) == (a∪c)∪(b∪d).
// Proven by chaining the binary associativity lemma (commutativity of
// `loc_union` is implicit in the `loc` algebra).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Collapses the `modifies` footprint accumulated by one unfolding of
// `insert_` — (level-lv element ∪ cell lv ∪ aloc) joined with the recursive
// footprint (elements/cells from lv+1 ∪ aloc) — into the single-level-deeper
// statement over [lv, size) used in `insert_`'s postcondition.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both the cell range and the element range at lv/lv+1 ...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening for the even (non-recursive) branch of `insert_`: a `modifies l1`
// can always be restated as `modifies ((l1 ∪ l2) ∪ l3)` since the larger
// union includes l1.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Trivial sequence fact: after `snoc`, the element at the original last
// position is still the original last element. Discharged automatically.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The full rvector invariant implies the element-regionality sub-invariant
// on any sub-range [i, j). Discharged automatically.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0             h0   h1   h2    ====>   h0   h1   h2   h3
// 1             h01                     h01  h23
// 2                                     h03
//
// Proof structure: after pushing `acc` at level `lv` (via
// `hash_vv_insert_copy`), the odd case compresses the last two hashes into
// `acc` and recurses one level up; the even case stops. Step 5 then merges
// the accumulated `modifies` footprints and re-establishes the invariants.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  // Odd case: compress the two rightmost hashes of this level into `acc`
  // and propagate upward.
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))

  // Even case: the push at level `lv` is the whole insertion; no recursion.
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Runtime insertion precondition on a dereferenced tree: the tree is not
// full and adding j+1 to the 64-bit offset does not overflow.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Public, stateful wrapper over `mt_insert_pre_nst`: dereferences the
// (const) tree pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // NOTE(review): this assert is vacuous (x == x); presumably intended to
  // remind the solver that the dereferenced hash size matches `hsz` — the
  // precondition already provides that fact. Left as-is.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  // Shadow the erased `hsz` argument with the concrete runtime field.
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `insert_` only touched `hs` and `v`'s region: `rhs` and `mroot` are intact.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and record the new tree value.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the tree record does not disturb any component (disjoint regions).
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// This variant takes a caller-supplied hash function (and its erased spec).
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
// Build an empty tree, then insert the initial hash. The two snapshots are
// only needed as proof anchors for the postcondition.
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.

// A (Merkle) path: a hash size together with a vector of hash pointers
// borrowed from the tree.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
// `mtr` is the region of the tree the path hashes are borrowed from: every
// hash in the path must be live and live inside `mtr`, while the path's own
// storage lives in a region disjoint from `mtr`.
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// The footprint of a path: everything under the path pointer's region.
// (The borrowed hashes live in the tree's region and are NOT part of it.)
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lifting of a slice [i, j) of a hash-pointer sequence to a high-level
// path (a sequence of hash values), reading each hash from memory `h`.
// Built back-to-front: lift [i, j-1) and snoc the value at j-1.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path

// Lift a whole path pointer: apply `lift_path_` over its full hash vector.
// `path_safe` supplies the per-element liveness that `lift_path_` requires.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Index correspondence for `lift_path_`: the k-th lifted value is the
// representation of the k-th pointer. Proof peels off the last element
// until `k` is reached. Registered as an SMT pattern so indexing facts
// fire automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-pointer version of the index correspondence: reading element `i` of
// the path's vector matches index `i` of the lifted path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence: two pointer sequences that agree (as slices) on [i, j) lift
// to equal paths. Proven pointwise via the `lift_path_index_` SMT pattern;
// the chain of asserts re-indexes the facts until extensional equality of
// the lifted sequences is in scope.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Framing for the element-wise part of `path_safe`: a modification disjoint
// from the whole tree region `mtr` preserves liveness/region-inclusion of
// every borrowed hash in [i, j). Recurses from the last element down.
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       // Each hash region is under `mtr`, hence disjoint from `dl`.
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
(* Frame lemma (path form): [path_safe] is preserved by modifications
   disjoint from both the path's own footprint and the tree region. *)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
// Delegate to the sequence-level lemma over all hashes of the path.
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
(* Special case of path framing for an empty path: safety and emptiness
   are preserved by any modification disjoint from the path footprint
   (no tree-region disjointness needed since there are no hashes). *)
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
(* Frame lemma for lifting (sequence form): under the same disjointness
   conditions as [path_safe_preserved_], the lifted slice is unchanged
   between [h0] and [h1].  Induction on [j]. *)
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
// Recurse on the prefix, then show the last element's representation is
// unchanged via regional separation.
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
(* Frame lemma for lifting (path form): a modification disjoint from the
   path and the tree region leaves the lifted path (and its hash size)
   unchanged. *)
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
// Delegate to the sequence-level lemma over all hashes of the path.
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
(* Allocate a fresh, empty path with hash size [hsz] in region [r]
   (disjoint from the tree region [mtr]).  The underlying hash vector is
   allocated in a new sub-region of [r]; the lifted path is empty. *)
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
(* Reset a path to empty.  Only the vector's logical size is cleared; the
   hash size is kept and no hash memory is freed here. *)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
(* Free a path: releases the hash vector and then the path cell itself.
   Note the stored hashes are pointers into the tree and are NOT freed. *)
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
(* Compute the "rightmost hashes" [rhs] of an (incomplete) Merkle tree
   level by level, accumulating the Merkle root in [acc].  [actd] records
   whether [acc] already holds an active partial hash.  Each recursive
   step halves [i] and [j], moving one level up; the postcondition ties
   the result to the high-level specification [MTH.construct_rhs]. *)
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: empty level; nothing to hash, state is unchanged.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even number of elements at this level: recurse directly to the next level.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd number of elements: fold the dangling rightmost hash into [acc]
// (and save the old [acc] into [rhs] when it was active), then recurse.
else begin
if actd
then begin
// Save the current accumulator as this level's rightmost hash,
// then hash it together with the dangling element.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// Accumulator not active: copy the dangling element into [acc] as-is.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
// Summarize the state after the odd-case bookkeeping, then recurse with
// an active accumulator.
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
(* Runtime precondition check for [mt_get_root]: trivially true — getting
   the root has no data-dependent precondition beyond the static ones. *)
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
(* Stateful wrapper around [mt_get_root_pre_nst]: dereferences the
   (const) tree pointer and evaluates the runtime precondition. *)
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
(* Return the Merkle root in [rt].  If [rhs_ok] is set, the cached root
   [mroot] is simply copied out; otherwise [construct_rhs] rebuilds the
   rightmost hashes and the root, which are then cached back into the
   tree (setting [rhs_ok] to true). *)
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
// Fast path: the cached root is up to date; just copy it out.
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
// Slow path: rebuild rightmost hashes and the root from level 0.
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly computed root back into the tree.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Mark the cached rhs/root as valid.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
(* Append a hash pointer [hp] (owned by the tree region [mtr]) to the
   path [p].  The lifted path is extended by the representation of [hp],
   matching [MTH.path_insert]. *)
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// Insert into the vector (may reallocate it), then re-establish safety
// and lifting across both the insertion and the path-cell update.
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
// The final lifted path is the old one with [hp]'s representation snoc'd.
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For a given target index `k`, the number of elements in the tree `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
(* Number of hashes (0 or 1) contributed to the path at one tree level,
   for target index [k], element count [j], and accumulator flag [actd].
   The refinement ties it to the high-level [MTH.mt_path_length_step]. *)
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
// Left child: no sibling when k is the last element, or the sibling slot
// is the inactive rightmost hash.
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
(* Total Merkle path length from level [lv] upward, summing the per-level
   [mt_path_length_step] contributions; halves [k] and [j] each level.
   The refinement bounds the result by the remaining tree height. *)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
// An odd level count activates the accumulator for the levels above.
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
(* Return the number of hashes currently stored in a (const) path. *)
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
(* One step of path construction at level [lv]: insert into [p] the
   sibling hash of index [k] — the left neighbor when [k] is odd, or the
   right neighbor / rightmost hash when [k] is even — mirroring
   [MTH.mt_make_path_step]. *)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
// Odd index: the sibling is the left neighbor at this level.
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
// Even index: no sibling if k = j; use the rightmost hash when the right
// neighbor is the dangling last element; otherwise the right neighbor.
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
(* Runtime precondition for [mt_get_path_step]: the requested index must
   be within the path's current number of hashes. *)
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
(* Stateful wrapper around [mt_get_path_step_pre_nst]: dereferences the
   (const) path pointer and checks the index bound. *)
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
(* Return the [i]-th hash pointer stored in a (const) path. *)
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
(* Recursive core of path construction: from level [lv] up to the root,
   perform [mt_make_path_step] at each level and recurse with halved
   indices.  The result matches [MTH.mt_get_path_] and grows the path by
   exactly [mt_path_length lv k j actd] hashes. *)
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
// One level's step, then re-establish invariants (the path modification
// is disjoint from the tree) and recurse with halved indices.
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
(* Runtime precondition for [mt_get_path]: the requested offset must map
   into the tree's current window [i, j), hash sizes must agree, and the
   output path must start empty. *)
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
(* Stateful wrapper around [mt_get_path_pre_nst]: dereferences the tree
   and path pointers and evaluates the runtime precondition. *)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
(* Location-algebra helper: union is idempotent in its second argument,
   i.e. (l1 ∪ l2) ∪ l2 == l1 ∪ l2.  Used to collapse modifies clauses. *)
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
(* Build the Merkle path for offset [idx]: first obtain the root (which
   also refreshes the rightmost hashes), insert the leaf hash itself,
   then run [mt_get_path_] from level 0.  Returns the tree's element
   count [j]; the stored path entries are pointers into the tree. *)
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
// Compute/refresh the root first; this also validates the rhs cache.
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
// The path starts with the leaf hash at [idx] itself.
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
// Collect the sibling hashes from level 0 up to the root.
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
(* Location-algebra helper for the [mt_flush_to_] recursion: peeling off
   level [lv]'s element and vector-slot locations from the union over
   [lv, size_of hs) yields the same total footprint.  Proved by unfolding
   one step of each union and reassociating. *)
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
/// Recursive worker for flushing: at each level starting from `lv`, drop the
/// hashes whose index is below `offset_of i`, then recurse at `lv+1` with the
/// halved indices.  `pi` is the current head index at this level, `i` the new
/// head, and (ghost) `j` the tail.  The postcondition ties the result to the
/// high-level spec `MTH.mt_flush_to_`; the function terminates because `i`
/// strictly decreases when halved (decreases clause below).
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
// Base case: nothing to drop at this level (and hence at any higher level).
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
// Collapse the nested union into the advertised `modifies` footprint.
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
// Unfold one step of the high-level spec to close the proof.
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
/// Pure (non-stateful) precondition for `mt_flush_to`: the target offset
/// must be reachable from the tree's base offset (fits in 32 bits of
/// difference), and the resulting internal index must lie in [i, j) —
/// i.e. refer to an element that is present and not yet flushed.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
/// Stateful wrapper over `mt_flush_to_pre_nst`: dereferences the
/// (const) tree pointer and checks the flush-to precondition.
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
/// Flush the tree up to (the internal index of) `idx`: run the recursive
/// worker `mt_flush_to_` from level 0, then store back a tree record whose
/// head index `i` is the new `idx`.  Matches the high-level spec
/// `MTH.mt_flush_to`.
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
// Flush every level, starting from the leaves.
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
// `rhs` and `mroot` live in regions disjoint from the flushed footprint,
// so their invariants and representations are preserved.
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
// Store back the record with the advanced head index `idx`.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
// Writing the record buffer itself does not disturb the hash structures.
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
/// Pure precondition for `mt_flush`: the tree must be non-empty
/// (at least one unflushed element, i.e. i < j).
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
/// Stateful wrapper over `mt_flush_pre_nst` for const tree pointers.
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
/// Flush everything except the last inserted element: equivalent to
/// `mt_flush_to` at offset `offset + (j - 1)`.  At least one base hash
/// always remains in the tree (see the comment block above `mt_flush_to`).
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
// Rebuild the 64-bit offset of the last element; the bound checks below
// establish that the 64-bit addition cannot overflow.
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
/// Recursive worker for retraction: at each level from `lv` up, shrink the
/// hash vector so the tail index goes from `j` back to `s` (with `i <= s <= j`),
/// then recurse at `lv+1` with halved indices.  Mirrors `mt_flush_to_` but
/// truncates from the tail instead of dropping from the head.  The result
/// matches the high-level spec `MTH.mt_retract_to_`.
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// NOTE: `lv < V.size_of hs` is a type refinement, so this guard is
// unreachable; kept as written to preserve the verified proof.
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
// 2) Assign the retracted vector back into `hs` at level `lv`.
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
// 3) Recurse at the next level if one exists; otherwise close the proof.
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
// Same footprint-collapsing lemma as in the flush path.
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
// Topmost level: the shrink at `lv` alone establishes the postcondition.
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
/// Pure precondition for `mt_retract_to`: the retraction offset must be
/// reachable from the tree's base offset, and its internal index must lie
/// in [i, j) so at least one element remains after retraction.
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv)
/// Stateful wrapper over `mt_retract_to_pre_nst`: dereferences the
/// (const) tree pointer and checks the retraction precondition.
val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_retract_to_pre mt r =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r
#push-options "--z3rlimit 100"
/// Retract the tree so that element `r` (given as a 64-bit offset) becomes
/// the last one: shrink every level via `mt_retract_to_`, then store back a
/// record with tail index `r + 1` and `rhs_ok = false` (the cached right-hand
/// hashes are invalidated by the retraction).  Matches `MTH.mt_retract_to`.
val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt)))
let mt_retract_to mt r =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let r = split_offset offset r in
let hs = MT?.hs mtv in
// Shrink every level so that the new tail is `r + 1`.
mt_retract_to_ hs 0ul (MT?.i mtv) (r + 1ul) (MT?.j mtv);
let hh1 = HST.get () in
// `rhs` and `mroot` are disjoint from the retracted footprint.
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
// New tail is r+1; `rhs_ok` is reset to false since cached hashes are stale.
mt *= MT (MT?.hash_size mtv) (MT?.offset mtv) (MT?.i mtv) (r+1ul) hs false (MT?.rhs mtv) (MT?.mroot mtv) (MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
// Writing the record buffer itself does not disturb the hash structures.
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs (MT?.i mtv) (r+1ul) (B.loc_buffer mt) hh1 hh2
#pop-options
/// Client-side verification
/// Recursive worker for client-side path verification: walk the Merkle path
/// `p` from position `ppos`, folding each sibling hash into the accumulator
/// `acc` with `hash_fun`, halving `k` (leaf index) and `j` (tree width) at
/// each level.  `actd` tracks whether `acc` currently holds an "active"
/// partial hash.  The result is pinned to the pure spec `MTH.mt_verify_`.
private
val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
// Below is a very relaxed condition,
// but sufficient to ensure (+) for uint32_t is sound.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let rec mt_verify_ #hsz #hash_spec k j mtr p ppos acc actd hash_fun =
let ncp:path_p = CB.cast p in
let hh0 = HST.get () in
// Base case: an empty (sub)tree — the accumulator is the final hash.
if j = 0ul then ()
else (let nactd = actd || (j % 2ul = 1ul) in
if k % 2ul = 0ul then begin
// `k` is a left child.  If it has no right sibling at this level
// (k is the last node, or the only extra node without an active
// accumulator), skip without consuming a path element.
if j = k || (j = k + 1ul && not actd) then
mt_verify_ (k / 2ul) (j / 2ul) mtr p ppos acc nactd hash_fun
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun acc phash acc; // acc := hash(acc, sibling)
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (Rgl?.r_repr (hreg hsz) hh0 acc)
(S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos)));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end
end
else begin
// `k` is a right child: the sibling hash goes on the left.
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun phash acc acc; // acc := hash(sibling, acc)
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos))
(Rgl?.r_repr (hreg hsz) hh0 acc));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end)
#pop-options
/// Pure precondition for verification: `k < j`, both offsets reachable from
/// the tree's base offset, hash sizes agree between tree and path, and the
/// path holds exactly `1 + mt_path_length 0 k j false` hashes (the extra
/// one is the leaf hash under verification).
private inline_for_extraction
val mt_verify_pre_nst: mt:merkle_tree -> k:offset_t -> j:offset_t -> p:path -> rt:(hash #(MT?.hash_size mt)) -> Tot bool
let mt_verify_pre_nst mt k j p rt =
k < j &&
offsets_connect (MT?.offset mt) k &&
offsets_connect (MT?.offset mt) j &&
MT?.hash_size mt = Path?.hash_size p &&
([@inline_let] let k = split_offset (MT?.offset mt) k in
[@inline_let] let j = split_offset (MT?.offset mt) j in
// We need to add one since the first element is the hash to verify.
V.size_of (Path?.hashes p) = 1ul + mt_path_length 0ul k j false)
val mt_verify_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_verify_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt)))
(ensures (fun _ _ _ -> True)) | [] | MerkleTree.Low.mt_verify_pre | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mt: MerkleTree.Low.const_mt_p ->
k: EverCrypt.Helpers.uint64_t ->
j: EverCrypt.Helpers.uint64_t ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.const_path_p ->
rt: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 34,
"end_line": 2922,
"start_col": 40,
"start_line": 2918
} |
FStar.Pervasives.Lemma | val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))] | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k | val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
let rec lift_path_index_ #hsz h hs i j k = | false | null | true | if i = j then () else if k = j - 1 then () else lift_path_index_ #hsz h hs i (j - 1) k | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"lemma",
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash",
"FStar.Integers.nat",
"Prims.b2t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Seq.Base.length",
"FStar.Integers.op_Less",
"Prims.op_Equality",
"Prims.bool",
"FStar.Integers.int_t",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.lift_path_index_",
"Prims.unit"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
// A `const_pointer` is a length-1 const buffer whose underlying qualifier is
// MUTABLE, i.e. a read-only view of a mutable pointer.
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}

/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t

// 2^32 - 1 as a 32-bit constant: the largest in-tree index.
let uint32_32_max = 4294967295ul
// 2^32 - 1 as a 64-bit constant, used to bound offset differences.
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
// Two offsets belonging to the same tree may differ by at most 2^32 - 1.
let offset_range_limit = uint32_max

// Global element positions are 64-bit offsets; positions within one tree
// are 32-bit indices.
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32

// `offsets_connect x y` holds when `y` lies at or above `x` and within the
// 32-bit range, so that `y - x` fits in an `index_t`.
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit

// Convert a global 64-bit offset `index` into a 32-bit index relative to the
// tree's base offset `tree`. The refinement guarantees the difference fits.
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
  [@inline_let] let diff = U64.sub_mod index tree in
  assert (diff <= offset_range_limit);
  Int.Cast.uint64_to_uint32 diff

// `add64_fits x i` holds when `x + i` does not overflow 64 bits.
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)

// Rebuild a global offset from a tree's base offset and an in-tree index.
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
  U64.add tree (u32_64 i)

// Number of levels in the hash store: 32, since leaf indices are 32-bit.
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate some merkle paths that need the rightmost hashes
//        as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
//          root of the tree. If `rhs_ok` is true then it has the up-to-date
//          root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree

// A (mutable) pointer to a tree, and a read-only view of such a pointer.
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree

// Boolean counterpart of the refinements on the `MT` constructor's fields:
// index ordering, offset overflow, and the fixed sizes of `hs` and `rhs`.
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
  j >= i && add64_fits offset j &&
  V.size_of hs = merkle_tree_size_lg &&
  V.size_of rhs = merkle_tree_size_lg

// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max

// Heap-level version of `mt_not_full_nst`: reads the tree record from memory.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)

/// (Memory) Safety

// `offset_of i` rounds `i` down to the nearest even index; it is the in-vector
// position offset for a level whose valid range starts at index `i`.
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  GTot Type0 (decreases (32 - U32.v lv))
// Defined by recursion on the level: level `lv` holds exactly
// `j - offset_of i` hashes, and the predicate recurses on the parent level
// with the halved index range.
let rec mt_safe_elts #hsz h lv hs i j =
  if lv = merkle_tree_size_lg then true
  else (let ofs = offset_of i in
       V.size_of (V.get h hs lv) == j - ofs /\
       mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"

// Introduction form: build `mt_safe_elts` at level `lv` from its two
// conjuncts (size equation at `lv` plus safety one level up).
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()

// Elimination form: extract the size equation for level `lv`.
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()

// Elimination form: safety at level `lv` gives safety one level up
// for the halved range.
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()

// All-empty levels are trivially safe for the range [0, 0).
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                  (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing: `mt_safe_elts` is stable under any modification disjoint from the
// vector's footprint. Registered as SMT patterns so the solver applies it
// automatically.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Level-by-level induction; `V.get_preserved` frames each level's vector.
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: each sub-structure lives in its own sub-region of the
  // tree's region, and the three sub-regions are pairwise disjoint.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))

// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing of the full tree invariant: a modification disjoint from the tree's
// footprint preserves both the stored record and `mt_safe`.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // Each inclusion shows a component's footprint is inside `mt_loc mt`,
  // hence disjoint from `p`; then the per-component framing lemmas apply.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
         (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// Low-level element safety implies the high-level well-formedness predicate
// `MTH.hs_wf_elts` on the lifted sequence of hash vectors.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Induction over levels, mirroring the definition of `mt_safe_elts`.
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Lift a low-level tree record to the high-level (specification) tree.
// The refinement on `mtv` states exactly the invariants needed so that
// every component can be lifted.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to discharge the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))

// Lift through a tree pointer: dereference, then `merkle_tree_lift`.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing of the lifted view: a modification disjoint from the tree's
// footprint leaves the high-level lift unchanged.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  // The tree record itself is untouched...
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // ...and so are the lifted sequences/representations of each component.
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
    (requires (fun _ -> true))
    (ensures (fun h0 mt h1 ->
      let dmt = B.get h1 mt 0 in
      // memory safety
      B.frameOf mt = r /\
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      mt_not_full h1 mt /\
      // correctness
      MT?.hash_size dmt = hash_size /\
      MT?.offset dmt = 0UL /\
      merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Allocate the 32-level hash store in its own sub-region of `r`.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Allocate the "rightmost hashes" store in its own sub-region.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Frame `hs` facts across the `rhs` allocation.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // Allocate the Merkle-root hash in its own sub-region.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Allocate and initialize the tree record itself (empty: i = j = 0).
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Free every sub-structure (hash store, rightmost hashes, root hash) and
// finally the tree record; only the tree's footprint is modified.
val mt_free: mt:mt_p ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt))
    (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Updating index `i` of an rvector's lifted sequence equals
// (prefix before `i`) ++ (new value) ++ (suffix after `i`).
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // Slices of an updated sequence that avoid index `i` are unchanged.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Relate slices of the lifted sequence with `as_seq_sub` on both sides of `i`.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// Note: the source hash `v` is only read; its invariant is preserved
// (`Rgl?.r_inv ... h1 v` in the postcondition).
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety: only level `lv`'s elements and `hs`'s slot `lv` change
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness: matches the high-level `hashess_insert`,
      // and level `lv` is exactly the old level with `v` snoc'ed
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in
  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);
  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in
  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Index arithmetic for insertion at an even `j`: the parent index does not
// advance, i.e. (j + 1) / 2 == j / 2.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Index arithmetic for insertion at an odd `j`: the parent index advances
// ((j + 1) / 2 == j / 2 + 1), it still fits one level up, and level `lv`
// is non-empty (j - offset_of i > 0).
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearrangement of a four-way location union:
// (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Location arithmetic used by `insert_`: level `lv`'s own footprint
// (element + slot) together with the recursive call's footprint
// (levels `lv + 1` and up), each unioned with `aloc`, equals the footprint
// claimed for the whole range starting at `lv`, unioned with `aloc`.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both the slot range and the element range at `lv + 1`.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weaken a `modifies l1` fact to the union with two further locations.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)

// In `snoc s v`, the element at position `length s - 1` is `s`'s last element.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()

// `rv_inv` implies the per-element region invariant on any subrange.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0             h0 h1 h2                h0 h1 h2 h3
// 1             h01                     h01 h23
// 2                                     h03
//
// Note: `acc` is used as an in-place accumulator (it is both an input and an
// output of `hash_fun`), so its contents are clobbered during insertion.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness: matches the high-level specification `MTH.insert_`
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // Push `acc` onto level `lv` (copying its content).
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in
  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
  // Odd `j`: the new hash pairs with the previous one, so compress the
  // pair into `acc` and recurse one level up.
  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);
       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in
       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);
       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);
       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));
       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in
       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);
       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  // Even `j`: the insertion at level `lv` is the whole job; no recursion.
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in
  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);
  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Pure (heap-free) precondition for insertion: the tree is not full and the
// 64-bit offset still fits when the leaf index `j` grows by one.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the const tree
// pointer and evaluates the pure check on the current tree value.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
// Trivially-true assertion; presumably kept to help the SMT encoding of the
// hash-size projection -- do not remove without re-checking the proof.
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Specification: requires the tree invariant, a valid hash `v` living in a
// region disjoint from the tree, and the pure insertion precondition; ensures
// the low-level insertion agrees with the high-level `MTH.mt_insert`.
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
// 1) Do the actual insertion into the level-0 hash store; `insert_` also
// folds `v` upward through the levels (it is the recursive worker).
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// 2) Framing: show that `rhs` and `mroot` are untouched by the insertion,
// whose footprint is the `hs` elements/vector plus the regions of `v`.
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
// 3) Bump `j` and write back the updated tree record.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// 4) Framing again: the pointer write only touches the tree cell itself, so
// all component invariants and representations survive.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// `mt_create_custom` additionally takes the hash size, the (ghost) hash
// specification, and the concrete hash function to use.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
// Allocate an empty tree in region `r`, then insert the first element.
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size together with a vector of hashes of that size.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
// Mutable and const pointers to a path.
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// `path_safe h mtr p`: the path pointer and its vector are live and freeable,
// every stored hash satisfies the regional invariant and lives inside the
// tree region `mtr`, and the path's own regions are disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Footprint of a path: all regions under the pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lift the slice [i, j) of a low-level hash sequence to a high-level path
// (a sequence of hash representations), recursing on `j`.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lift the whole hash vector of a safe path to its high-level counterpart.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th lifted element is the
// representation of the k-th low-level hash. Registered as an SMT pattern so
// it fires automatically in later proofs.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))] | [
"recursion"
] | MerkleTree.Low.lift_path_index_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
h: FStar.Monotonic.HyperStack.mem ->
hs: FStar.Seq.Base.seq MerkleTree.Low.Datastructures.hash ->
i: FStar.Integers.nat ->
j: FStar.Integers.nat{i <= j && j <= FStar.Seq.Base.length hs} ->
k: FStar.Integers.nat{i <= k && k < j}
-> FStar.Pervasives.Lemma
(requires
LowStar.Vector.forall_seq hs
i
j
(fun hp -> Rgl?.r_inv (MerkleTree.Low.Datastructures.hreg hsz) h hp))
(ensures
Rgl?.r_repr (MerkleTree.Low.Datastructures.hreg hsz) h (FStar.Seq.Base.index hs k) ==
FStar.Seq.Base.index (MerkleTree.Low.lift_path_ h hs i j) (k - i))
(decreases j)
[SMTPat (FStar.Seq.Base.index (MerkleTree.Low.lift_path_ h hs i j) (k - i))] | {
"end_col": 45,
"end_line": 1120,
"start_col": 2,
"start_line": 1118
} |
Prims.Tot | val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd) | val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
let rec mt_path_length lv k j actd = | false | null | false | if j = 0ul
then 0ul
else
(let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd + mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
"total",
""
] | [
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.index_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt32.v",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"Prims.bool",
"Prims.op_Equality",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Plus",
"MerkleTree.Low.mt_path_length_step",
"MerkleTree.Low.mt_path_length",
"FStar.Integers.op_Slash",
"Prims.op_BarBar",
"FStar.Integers.op_Percent",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"FStar.UInt.size",
"FStar.UInt32.n",
"MerkleTree.New.High.mt_path_length"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
// Mutable and const pointers to a tree.
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
// Boolean well-formedness check on the components of a tree record
// (index ordering, offset overflow, and the fixed 32-level vector sizes).
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
// Heap-level variant of `mt_not_full_nst`, reading the tree through its pointer.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
// Round an index down to the nearest even number; this is the first index
// still physically stored at a level.
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
// Defined by recursion over the levels: at each level the stored vector has
// exactly `j - offset_of i` elements, and the indices halve going up.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction form: fold one unrolling of `mt_safe_elts` back into the predicate.
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination form: extract the size equation at the current level.
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination form: extract the predicate for the next level up.
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// All-empty levels satisfy the predicate with i = j = 0.
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` survives any modification disjoint from the
// hash store. Registered as an SMT pattern so framing is applied automatically.
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Proof by induction over the levels, framing each level's vector cell.
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma: the whole tree invariant (and the tree record itself) is
// preserved by any modification disjoint from `mt_loc mt`.
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
// Show `mt_loc mt` covers each component, then frame each component's invariant.
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
// The low-level `mt_safe_elts` implies the high-level well-formedness
// predicate `MTH.hs_wf_elts` on the lifted hash store.
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Induction over the levels, mirroring the definition of `mt_safe_elts`.
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lifting of a low-level tree value to the high-level model `MTH.MT`,
// assuming all component invariants hold in `h`.
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
// Establish high-level well-formedness before constructing the model value.
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
// Lift a safe tree pointer by lifting its pointed-to value.
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
// Framing lemma for the representation: a modification disjoint from the tree
// leaves its high-level lifting unchanged.
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
// Frame the tree cell itself, then each component's representation.
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
// Allocate the hash store `hs` in a fresh sub-region of `r`.
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
// Allocate the rightmost-hash store `rhs` in its own fresh sub-region, and
// frame `hs` against that allocation.
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
// Allocate the Merkle root cell, framing previous allocations (loc_none:
// fresh-region allocations modify nothing pre-existing).
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
// Finally allocate the tree record itself (offset 0, i = j = 0, rhs_ok false).
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
// Frees every component of the tree (hash store, rightmost hashes, root cell)
// and then the tree record itself.
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
// Sequence algebra helper: updating index `i` of an rvector's representation
// equals re-assembling it from the prefix [0, i), the new value, and the
// suffix [i+1, size).
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
// Relate slices of the updated sequence to the sub-sequence views, then
// conclude by the update at index i.
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
// Appends a copy of hash `v` to the level-`lv` vector of `hs`, then
// re-establishes the memory-safety invariants (`rv_inv`, `mt_safe_elts`
// for the untouched tail levels) and the correctness postcondition that
// relates the result to the high-level `MTH.hashess_insert`.
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper: when `j` is even, inserting one element does not
// carry into the next level — the parent index `j / 2` is unchanged by
// the increment (`j / 2 == (j + 1) / 2`).
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper: when `j` is odd, inserting one element carries into
// the next level (`(j + 1) / 2 == j / 2 + 1`), the parent index still fits
// the level's bound, and the current level is non-empty (`j - offset_of i > 0`).
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a 4-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Proved by chaining the binary associativity lemma; used below to regroup
// `modifies` footprints.
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
// Footprint algebra for the recursive case of `insert_`: the union of the
// head-level footprint (element `lv` plus the vector slot `lv`) and the
// tail footprint (elements/slots from `lv + 1`) collapses to the single
// footprint over levels `lv .. size_of hs`, each side also unioned with
// the accumulator's region `aloc`.
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Split both the vector-slot footprint and the element footprint at `lv`.
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening of a `modifies` clause: if `l1` was modified, then so was any
// superset footprint `(l1 ∪ l2) ∪ l3`. Used for the even-`j` base case of
// `insert_`, where only the head-level footprint is actually touched.
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence helper: the element at the old last position of `snoc s v`
// is `S.last s` (the snoc'ed value does not displace it).
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The regional-vector invariant `rv_inv` subsumes the per-slice region
// condition `rv_elems_reg` for any sub-range [i, j); the SMT solver
// discharges it directly.
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Implementation note: `acc` is both input and output of each `hash_fun`
// call, i.e. it is destructively reused as the running compressed hash.
// The structure mirrors the high-level `MTH.insert_`: append at level `lv`,
// then (odd `j` only) compress and recurse at level `lv + 1`.
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the
// 64-bit offset can absorb one more element (`offset + j + 1` fits).
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper of `mt_insert_pre_nst`: dereferences the const tree
// pointer and checks the runtime insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
// Delegates to `insert_` starting at level 0, then rewrites the tree record
// with `j + 1` and `rhs_ok = false`, re-proving that the untouched fields
// (`rhs`, `mroot`) are preserved across both the insertion and the record
// update.
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
// Creates an empty tree in a fresh region under `r`, then inserts `init`
// as the first leaf (which, per `mt_insert`, consumes `init` as accumulator).
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a vector of hash pointers tagged with its hash size; paths are
// handled through (possibly const) pointers.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path pointed to by `p`
// in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the pointer and its vector are
// live and freeable, every hash in the path lives inside the tree region
// `mtr`, the vector's region extends the pointer's frame, and `mtr` is
// disjoint from the path's own frame.
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// The abstract footprint of a path: everything allocated under its frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the slice [i, j) of a sequence of low-level hash pointers to the
// high-level `MTH.path` representation, reading each hash through its
// regional representation in memory `h`. Defined by recursion on `j`.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Lifts an entire path to its high-level representation by applying
// `lift_path_` to the full hash vector.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Indexing commutes with lifting: the k-th lifted hash equals the regional
// representation of the k-th low-level hash. Registered as an SMT pattern
// so the solver applies it automatically.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Whole-path corollary of `lift_path_index_`: reading slot `i` of the
// path's vector matches indexing the lifted high-level path at `i`.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Extensionality for lifting: if two hash sequences agree on [i, j)
// (and both are valid there), their lifted paths over [i, j) are equal.
// Proved by exposing the pointwise characterization from `lift_path_index_`
// under both the (k - i)- and k-based reindexings.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Frame lemma (sequence form): if every hash in [i, j) lives inside `mtr`
// and the modified footprint `dl` is disjoint from `mtr`, then validity and
// region containment of those hashes survive from `h0` to `h1`.
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma (path form): `path_safe` survives any modification whose
// footprint is disjoint from both the path's own footprint and the tree
// region `mtr`.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for empty paths: with zero hashes there is nothing inside
// `mtr` to preserve, so disjointness from the path footprint alone suffices
// (no disjointness with `mtr` is required).
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Representation frame lemma (sequence form): under the same disjointness
// hypotheses as `path_safe_preserved_`, the lifted path over [i, j) is the
// same in `h0` and `h1` (the hash contents are untouched, not just live).
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Representation frame lemma (path form): a modification disjoint from the
// path and from `mtr` preserves both the path's hash size and its lifted
// high-level representation.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocates a fresh empty path: a new sub-region of `r` holds the (empty)
// hash vector, and the path record itself is malloc'd in `r`. The result
// lifts to the empty high-level path.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to length zero without freeing it: the hash size is kept
// and the vector is cleared, so the lifted path becomes empty.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocates a path: frees the hash vector first, then the path record.
// Note: only the vector structure is freed here, not the hashes it points
// to (those belong to the tree's regions per `path_safe`).
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 10,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j)) | [
"recursion"
] | MerkleTree.Low.mt_path_length | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
k: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{k <= j && FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv)} ->
actd: Prims.bool
-> Prims.Tot
(l:
LowStar.Vector.uint32_t
{ FStar.UInt32.v l =
MerkleTree.New.High.mt_path_length (FStar.UInt32.v k) (FStar.UInt32.v j) actd &&
l <= 32ul - lv }) | {
"end_col": 59,
"end_line": 1775,
"start_col": 2,
"start_line": 1772
} |
FStar.HyperStack.ST.ST | val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt | val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
let mt_free mt = | true | null | false | let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@@ inline_let ]let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.mt_p",
"LowStar.Monotonic.Buffer.free",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Regional.rg_free",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.Regional.regional",
"MerkleTree.Low.Datastructures.hreg",
"LowStar.RVector.free",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 0,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1)) | [] | MerkleTree.Low.mt_free | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.mt_p -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 11,
"end_line": 392,
"start_col": 16,
"start_line": 386
} |
FStar.HyperStack.ST.ST | val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
// Below is a very relaxed condition,
// but sufficient to ensure (+) for uint32_t is sound.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_verify_ #hsz #hash_spec k j mtr p ppos acc actd hash_fun =
let ncp:path_p = CB.cast p in
let hh0 = HST.get () in
if j = 0ul then ()
else (let nactd = actd || (j % 2ul = 1ul) in
if k % 2ul = 0ul then begin
if j = k || (j = k + 1ul && not actd) then
mt_verify_ (k / 2ul) (j / 2ul) mtr p ppos acc nactd hash_fun
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun acc phash acc;
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (Rgl?.r_repr (hreg hsz) hh0 acc)
(S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos)));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end
end
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun phash acc acc;
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos))
(Rgl?.r_repr (hreg hsz) hh0 acc));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end) | val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
// Below is a very relaxed condition,
// but sufficient to ensure (+) for uint32_t is sound.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd))
let rec mt_verify_ #hsz #hash_spec k j mtr p ppos acc actd hash_fun = | true | null | false | let ncp:path_p = CB.cast p in
let hh0 = HST.get () in
if j = 0ul
then ()
else
(let nactd = actd || (j % 2ul = 1ul) in
if k % 2ul = 0ul
then
if j = k || (j = k + 1ul && not actd)
then mt_verify_ (k / 2ul) (j / 2ul) mtr p ppos acc nactd hash_fun
else
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun acc phash acc;
let hh1 = HST.get () in
path_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (Rgl?.r_repr (hreg hsz) hh0 acc)
(S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos)));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
else
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun phash acc acc;
let hh1 = HST.get () in
path_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos))
(Rgl?.r_repr (hreg hsz) hh0 acc));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"MerkleTree.Low.index_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.Datastructures.hash",
"Prims.bool",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"FStar.Ghost.hide",
"Prims.op_Equality",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"Prims.unit",
"FStar.Integers.op_Percent",
"Prims.op_BarBar",
"Prims.op_AmpAmp",
"FStar.Integers.op_Plus",
"Prims.op_Negation",
"MerkleTree.Low.mt_verify_",
"FStar.Integers.op_Slash",
"Prims._assert",
"Prims.eq2",
"Spec.Hash.Definitions.bytes",
"Prims.l_or",
"Prims.int",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"FStar.Seq.Base.length",
"Lib.IntTypes.uint8",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.index",
"MerkleTree.New.High.hash",
"MerkleTree.Low.lift_path",
"MerkleTree.Low.lift_path_index",
"MerkleTree.Low.path_preserved",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Buffer.trivial_preorder",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Vector.index",
"MerkleTree.Low.__proj__Path__item__hashes",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"MerkleTree.Low.path_p",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
// Footprint algebra for `insert_`'s recursive case: the union of
// (head-level elems + head slot + aloc) with (tail-level elems + tail slots + aloc)
// collapses to (all-level elems + all slots + aloc). `aloc` is instantiated
// with the accumulator's region footprint at the call site.
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Split the full vector footprint into head slot + tail slots.
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
// Split the element footprint the same way (head element + tail elements).
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
// Merge the two duplicate `aloc` occurrences into one.
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
// Final regrouping: (elem_head U slot_head) U (elems_tail U slots_tail)
// == (elem_head U elems_tail) U (slot_head U slots_tail).
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: `modifies l1` implies `modifies ((l1 U l2) U l3)`.
// Used in the even-index branch of `insert_` where only the head level is
// touched, to match the larger footprint of the overall postcondition.
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// After snoc-ing `v`, the element at the old last position is still `S.last s`.
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The regional-vector invariant implies that the elements in [i, j) live in
// well-formed regions; discharged automatically from `RV.rv_inv`'s definition.
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//               BEFORE INSERTION        AFTER INSERTION
// lv
// 0               h0 h1 h2          ====>    h0 h1 h2 h3
// 1               h01                        h01 h23
// 2                                          h03
//
// Postconditions: memory safety (the `modifies` footprint covers the hash
// levels from `lv` up plus the accumulator's region), preservation of the
// rvector invariants, and functional correctness against the high-level
// specification `MTH.insert_`.
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
// 1) Append a copy of `acc` to level `lv`.
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
// 2) Odd `j`: the two rightmost hashes at this level must be compressed
// and the result propagated to the next level by recursion.
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
// acc := hash(second-to-last hash at level `lv`, acc)
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
// Even `j`: no compression needed; only the head level changed, so weaken
// the `modifies` footprint to the full postcondition shape.
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the global
// index (offset + j + 1) still fits in 64 bits.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
// Trivial assertion that helps the checker relate `v`'s implicit hash size
// to the dereferenced tree value.
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
// Do the actual insertion at level 0; `v` doubles as the accumulator.
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Show `rhs` and `mroot` were untouched by `insert_` (its footprint is
// disjoint from them).
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
// Bump `j` and invalidate the cached rightmost hashes.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// The record update only modifies the tree pointer itself; everything it
// points to is preserved.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
// NOTE(review): `hh0`/`hh2` snapshots appear unused below; presumably kept
// to aid the SMT proof context — confirm before removing.
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size paired with a vector of hash pointers. Each element
// points into the target Merkle tree (see the invariant `path_safe` below).
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
// Mutable and const pointer aliases for paths.
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path `p` in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable, every hash in the vector satisfies the regional
// invariant and lives inside the tree's region `mtr`, the vector's region
// extends the path's frame, and `mtr` is disjoint from the path's frame.
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything allocated under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence [i, j) of hash pointers `hs` to a high-level
// (specification) path by dereferencing each hash in memory `h`.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole hash vector of `p` to the
// high-level specification path.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Indexing commutes with lifting: the k-th element of the lifted path is the
// dereferenced k-th hash pointer. Registered as an SMT pattern.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on `j`, peeling off the last (snoc-ed) element each step.
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: reading slot `i` of the path's
// vector and lifting commute.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Lifting respects sequence equality: if two pointer sequences agree on
// [i, j), their lifted paths on [i, j) are equal.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
// The asserts re-state pointwise equalities under different index shifts so
// the SMT solver can chain them; each follows from `lift_path_index_`'s
// SMT pattern and the slice equality hypothesis.
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Sequence-level frame lemma: if the hashes in [i, j) live inside `mtr` and
// the modified location `dl` is disjoint from `mtr`'s regions, the hashes'
// invariants survive the modification.
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
// Induction on `j`: frame the last element via `Rgl?.r_sep`, then recurse.
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma for `path_safe`: modifications disjoint from both the path's
// footprint and the tree's regions preserve the invariant.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: only the path's own footprint needs to be
// disjoint from `dl` (there are no hashes inside `mtr` to frame).
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Sequence-level frame lemma for the lifted representation: a modification
// disjoint from the tree's regions leaves `lift_path_` unchanged.
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on `j`: recurse on the prefix, then frame the last element.
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level frame lemma: a modification disjoint from the path and the
// tree's regions preserves both the hash size and the lifted representation.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocates an empty path in region `r` (disjoint from the tree region
// `mtr`); the hash vector lives in a fresh sub-region of `r`.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to length zero. The underlying vector storage is reused
// (`V.clear` only resets the size), keeping the hash size unchanged.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees a path: first the hash vector, then the path pointer itself.
// Note: the hashes referenced by the vector are owned by the tree and are
// not freed here.
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path

// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
//
// `actd` ("accumulator active") records whether `acc` currently holds a
// meaningful partial hash; when true at an odd level, the old accumulator is
// first saved into `rhs` before being combined with the rightmost hash.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: empty range; nothing to do, matching `MTH.construct_rhs`'s base.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even `j`: nothing to do at this level; recurse to the next level.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd `j`: incorporate the rightmost hash at this level into `acc`.
else begin
if actd
then begin
// Save the current accumulator into `rhs[lv]`, then fold the
// rightmost hash into `acc`.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// Accumulator not yet active: start it by copying the rightmost
// hash at this level into `acc`.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
// Both odd-case branches reach here with `acc` updated; recurse with
// `actd = true`.
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
// Non-stateful precondition for `mt_get_root`: always true (kept as a
// function so the API shape matches the other `_pre_nst` checks).
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper over `mt_get_root_pre_nst`: dereferences the (const)
// tree pointer and checks the precondition.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
// Trivial assertion helping the checker relate `rt`'s implicit hash size
// to the dereferenced tree value.
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// The postcondition pins down which tree fields are unchanged and marks
// `rhs_ok` as true afterwards; `rt` receives the root.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// Appends one hash pointer `hp` to the path `p` (no copy of the hash payload:
// the path stores a pointer into the tree region `mtr`, hence the
// `HH.includes mtr (B.frameOf hp)` precondition). Correctness: the lifted
// path afterwards equals `MTH.path_insert` on the lifted path before.
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
        let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
        V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
        hsz = hsz0 /\ hsz = hsz1 /\
        (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
        S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // `V.insert` may reallocate the vector; the preservation lemmas below show
  // the existing path entries (pointers into `mtr`) are untouched by it.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Store the (possibly new) vector back into the path struct.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Number of path hashes contributed at one tree level (0 or 1); the
// refinement ties the result exactly to the high-level spec
// `MTH.mt_path_length_step`.
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
// Total path length for target index `k` in a tree of `j` elements, summing
// `mt_path_length_step` over the levels (halving `k` and `j` each step).
// The refinement bounds the result by `32ul - lv`, which guarantees path
// vectors never overflow, and equates it to `MTH.mt_path_length`.
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the current number of hashes stored in the path `p`
// (a read-only accessor; no correctness postcondition needed).
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
// One step of Merkle-path construction at level `lv`: inserts into `p` the
// sibling hash of index `k` — the left sibling from `hs` if `k` is odd,
// otherwise the right sibling from `hs`, or from `rhs` when `k` is the last
// (rightmost) index and `actd` says a rightmost hash exists. May insert
// nothing (when `k = j`, or `k + 1 = j` with no active rightmost hash),
// which is why the size delta is exactly `mt_path_length_step k j actd`.
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
      V.size_of (phashes h1 p) <= lv + 2ul /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_make_path_step
           (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  // `ofs` converts an absolute index into a slot of the flushed level-`lv`
  // vector (flushed prefixes are removed, so slots start at `offset_of i`).
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // Odd index: the sibling is the left neighbor, always in `hs`.
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // Even index: the sibling is the right neighbor, if it exists.
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
// Non-stateful precondition for `mt_get_path_step`: the requested index `i`
// must be within the path's hash vector.
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper around `mt_get_path_step_pre_nst`: dereferences the
// const path pointer and performs the bounds check.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the `i`-th hash stored in the path (a pointer into the tree
// region; the caller must not outlive the tree with it).
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursive path construction from level `lv` up to the root: performs one
// `mt_make_path_step` at the current level, then recurses with `i`, `j`, `k`
// halved. Grows the path by exactly `mt_path_length lv k j actd` entries and
// matches the high-level spec `MTH.mt_get_path_`.
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) ==
      V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
  (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                    (MTH.mt_make_path_step
                      (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd));
    // The step only touched the path; re-establish tree invariants.
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    // Recurse one level up; `actd` becomes true once any level had an
    // odd number of elements (a pending rightmost hash).
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv + 1)
                      (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                      (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                      (lift_path hh1 mtr p)
                      (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv)
                      (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd)))
#pop-options
// Non-stateful precondition for `mt_get_path`: the requested offset must be
// reachable from the tree's offset, hash sizes must agree, the (split) index
// must lie in [i, j), and the output path must start empty.
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper: dereferences the const tree and path pointers and runs
// the non-stateful precondition check.
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
// Trivial location-algebra lemma (idempotence of union on the right),
// used to simplify the `modifies` clause in `mt_get_path`.
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
//
// Steps: (1) refresh the root via `mt_get_root` (which also rebuilds `rhs`
// if stale), (2) insert the leaf hash at `idx` as the first path entry,
// (3) build the rest of the path with `mt_get_path_`. Returns `MT?.j`, the
// tree size needed later by `mt_verify`.
#push-options "--z3rlimit 60"
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      let idx = split_offset (MT?.offset mtv0) idx in
      MT?.hash_size mtv0 = Ghost.reveal hsz /\
      MT?.hash_size mtv1 = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
      // memory safety
      modifies (loc_union
                 (loc_union
                   (mt_loc mt)
                   (B.loc_all_regions_from false (B.frameOf root)))
                 (path_loc p))
               h0 h1 /\
      mt_safe h1 mt /\
      path_safe h1 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h1 root /\
      V.size_of (phashes h1 p) ==
      1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
      // correctness
      (let sj, sp, srt =
        MTH.mt_get_path
          (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
      sj == U32.v (MT?.j mtv1) /\
      S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
      srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // Step 1: ensure `rhs` and the root are up to date.
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  // Step 2: the leaf hash itself is the first entry of the path.
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                  (MTH.path_insert
                    (lift_path hh1 mtframe p)
                    (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Step 3: collect the sibling hashes level by level.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                     (loc_union
                       (mt_loc ncmt)
                       (B.loc_all_regions_from false (B.frameOf root)))
                     (path_loc p))
                   hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                  (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                    (U32.v i) (U32.v j) (U32.v idx)
                    (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing

// Location-algebra lemma used by `mt_flush_to_`: folding the level-`lv`
// footprint together with the recursive (`lv+1` .. end) footprint yields the
// single footprint claimed in the `modifies` postcondition.
private val
mt_flush_to_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (loc_union
            (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
        loc_union
          (RV.rv_loc_elems h hs lv (V.size_of hs))
          (V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Recursive worker for flushing: at each level starting from `lv`, drops the
// hashes between the old start index `pi` and the new start index `i`
// (in-place, via `rv_flush_inplace`), then recurses one level up with the
// indices halved. `j` is ghost — it only participates in the proof.
// Matches the high-level spec `MTH.mt_flush_to_`.
private
val mt_flush_to_:
  hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  pi:index_t ->
  i:index_t{i >= pi} ->
  j:Ghost.erased index_t{
    Ghost.reveal j >= i &&
    U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                 (V.loc_vector_within hs lv (V.size_of hs)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
      S.equal (RV.as_seq h1 hs)
              (MTH.mt_flush_to_
                (U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
                (U32.v i) (U32.v (Ghost.reveal j))))))
    (decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
  let hh0 = HST.get () in

  // Base conditions
  mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
  V.loc_vector_within_included hs 0ul lv;
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  let oi = offset_of i in
  let opi = offset_of pi in
  // Recursion stops when flushing would remove nothing at this level.
  if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
  else begin

    /// 1) Flush hashes at the level `lv`, where the new vector is
    /// not yet connected to `hs`.
    let ofs = oi - opi in
    let hvec = V.index hs lv in
    let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
    let hh1 = HST.get () in

    // 1-0) Basic disjointness conditions for `RV.assign`
    V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
      (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                                (Rgl?.region_of (hvreg hsz) b2));
    V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
      (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                                (Rgl?.region_of (hvreg hsz) b2));
    V.forall_preserved
      hs 0ul lv
      (fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
                            (Rgl?.region_of (hvreg hsz) b))
      (RV.loc_rvector hvec)
      hh0 hh1;
    V.forall_preserved
      hs (lv + 1ul) (V.size_of hs)
      (fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
                            (Rgl?.region_of (hvreg hsz) b))
      (RV.loc_rvector hvec)
      hh0 hh1;
    assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);

    // 1-1) For the `modifies` postcondition.
    assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

    // 1-2) Preservation
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

    // 1-3) For `mt_safe_elts`
    assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
    mt_safe_elts_preserved
      (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
      (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

    // 1-4) For the `rv_inv` postcondition
    RV.rs_loc_elems_elem_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
    RV.rs_loc_elems_parent_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v lv);
    RV.rv_elems_inv_preserved
      hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
      hh0 hh1;
    assert (RV.rv_elems_inv hh1 hs 0ul lv);
    RV.rs_loc_elems_elem_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v (V.size_of hs))
      (U32.v lv + 1) (U32.v (V.size_of hs))
      (U32.v lv);
    RV.rs_loc_elems_parent_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      (U32.v lv + 1) (U32.v (V.size_of hs));
    RV.rv_elems_inv_preserved
      hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
      hh0 hh1;
    assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
    assert (rv_itself_inv hh1 hs);
    assert (elems_reg hh1 hs);

    // 1-5) Correctness
    assert (S.equal (RV.as_seq hh1 flushed)
                    (S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
                      (S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));

    /// 2) Assign the flushed vector to `hs` at the level `lv`.
    RV.assign hs lv flushed;
    let hh2 = HST.get () in

    // 2-1) For the `modifies` postcondition.
    assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
    assert (modifies (loc_union
                       (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                       (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

    // 2-2) Preservation
    V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

    // 2-3) For `mt_safe_elts`
    assert (V.size_of (V.get hh2 hs lv) ==
           Ghost.reveal j - offset_of i);
    mt_safe_elts_preserved
      (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
      (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

    // 2-4) Correctness
    RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
    RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
    assert (S.equal (RV.as_seq hh2 hs)
                    (S.append
                      (RV.as_seq_sub hh0 hs 0ul lv)
                      (S.cons (RV.as_seq hh1 flushed)
                              (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
    as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);

    // if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
    // contradicting the branch.
    assert (lv + 1ul < merkle_tree_size_lg);
    assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
    assert (RV.rv_inv hh2 hs);
    assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));

    /// 3) Recursion
    mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
      (Ghost.hide (Ghost.reveal j / 2ul));
    let hh3 = HST.get () in

    // 3-0) Memory safety brought from the postcondition of the recursion
    assert (modifies
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
             hh0 hh3);
    mt_flush_to_modifies_rec_helper lv hs hh0;
    V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
    V.loc_vector_within_included hs lv (lv + 1ul);
    RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
    assert (loc_disjoint
             (V.loc_vector_within hs lv (lv + 1ul))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
    V.get_preserved hs lv
      (loc_union
        (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      hh2 hh3;
    assert (V.size_of (V.get hh3 hs lv) ==
           Ghost.reveal j - offset_of i);
    assert (RV.rv_inv hh3 hs);
    mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
    assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));

    // 3-1) Correctness
    mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
                      (U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
    mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
    MTH.mt_flush_to_rec
      (U32.v lv) (RV.as_seq hh0 hs)
      (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
                      (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
  end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.

// Non-stateful precondition for `mt_flush_to`: `idx` must be reachable from
// the tree's offset, and after splitting must lie in [MT?.i, MT?.j).
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
  offsets_connect (MT?.offset mtv) idx &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  idx >= MT?.i mtv &&
  idx < MT?.j mtv)
// Stateful wrapper: dereferences the const tree pointer and runs the
// non-stateful flush precondition check.
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt)))
  (ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
  let mt = CB.cast mt in
  let h0 = HST.get() in
  let mtv = !*mt in
  mt_flush_to_pre_nst mtv idx
// Public flush entry point: splits the user-facing offset, runs the
// level-by-level worker `mt_flush_to_` from level 0, then writes back the
// tree record with the new start index `idx` (everything else unchanged).
// Correctness is stated against the high-level `MTH.mt_flush_to`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
  mt:mt_p ->
  idx:offset_t ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      (let mtv0 = B.get h0 mt 0 in
       let mtv1 = B.get h1 mt 0 in
       let off = MT?.offset mtv0 in
       let idx = split_offset off idx in
       MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
       MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let offset = MT?.offset mtv in
  let j = MT?.j mtv in
  let hsz = MT?.hash_size mtv in
  let idx = split_offset offset idx in
  let hs = MT?.hs mtv in
  mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
  let hh1 = HST.get () in
  // The flush only touched `hs`; show `rhs` and `mroot` are untouched.
  RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
      (V.loc_vector_within hs 0ul (V.size_of hs)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
      (V.loc_vector_within hs 0ul (V.size_of hs)))
    hh0 hh1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
    (loc_union
      (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
      (V.loc_vector_within hs 0ul (V.size_of hs)))
    hh0 hh1;
  // Write back the record with the new start index `idx`.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv) idx (MT?.j mtv)
           hs
           (MT?.rhs_ok mtv) (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv) (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
// `mt_flush` precondition: the tree must be non-empty (at least one
// unflushed element, i.e. `i < j`); non-stateful check plus its stateful
// wrapper on a const tree pointer.
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
// Flushes everything except the last inserted element: equivalent to
// `mt_flush_to` at index `j - 1` (rejoined with the tree's offset).
val mt_flush:
  mt:mt_p ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
    (ensures (fun h0 _ h1 ->
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      // memory safety
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
      MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
  let mtv = !*mt in
  let off = MT?.offset mtv in
  let j = MT?.j mtv in
  let j1 = j - 1ul in
  // The asserts discharge the 64-bit overflow side condition of
  // `join_offset` before recombining offset and index.
  assert (j1 < uint32_32_max);
  assert (off < uint64_max);
  assert (UInt.fits (U64.v off + U32.v j1) 64);
  let jo = join_offset off j1 in
  mt_flush_to mt jo
#pop-options
/// Retraction

// Recursive worker for retraction (the dual of flushing): at each level from
// `lv` upward, shrinks the level vector so the tree ends at index `s` instead
// of `j` (with `i <= s <= j`). Matches the high-level `MTH.mt_retract_to_`.
private
val mt_retract_to_:
  #hsz:hash_size_t ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  lv:uint32_t{lv < V.size_of hs} ->
  i:index_t ->
  s:index_t ->
  j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
  -> HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      mt_safe_elts h0 lv hs i j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      (modifies (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                h0 h1) /\
      RV.rv_inv h1 hs /\
      mt_safe_elts h1 lv hs i s /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      S.equal (RV.as_seq h1 hs)
              (MTH.mt_retract_to_
                (RV.as_seq h0 hs) (U32.v lv)
                (U32.v i) (U32.v s) (U32.v j)))
      ))
    (decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv)
val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_retract_to_pre mt r =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r
#push-options "--z3rlimit 100"
val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt)))
let mt_retract_to mt r =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let r = split_offset offset r in
let hs = MT?.hs mtv in
mt_retract_to_ hs 0ul (MT?.i mtv) (r + 1ul) (MT?.j mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv) (MT?.offset mtv) (MT?.i mtv) (r+1ul) hs false (MT?.rhs mtv) (MT?.mroot mtv) (MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs (MT?.i mtv) (r+1ul) (B.loc_buffer mt) hh1 hh2
#pop-options
/// Client-side verification
private
val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
// Below is a very relaxed condition,
// but sufficient to ensure (+) for uint32_t is sound.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
// Below is a very relaxed condition,
// but sufficient to ensure (+) for uint32_t is sound.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd)) | [
"recursion"
] | MerkleTree.Low.mt_verify_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
k: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{k <= j} ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.const_path_p ->
ppos: LowStar.Vector.uint32_t ->
acc: MerkleTree.Low.Datastructures.hash ->
actd: Prims.bool ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 11,
"end_line": 2883,
"start_col": 69,
"start_line": 2849
} |
FStar.HyperStack.ST.ST | val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2 | val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx = | true | null | false | let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved (MT?.rhs mtv)
(loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs)) (V.loc_vector_within hs 0ul (V.size_of hs))
)
hh0
hh1;
RV.as_seq_preserved (MT?.rhs mtv)
(loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs)) (V.loc_vector_within hs 0ul (V.size_of hs))
)
hh0
hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv))
(MT?.mroot mtv)
(loc_union (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs)) (V.loc_vector_within hs 0ul (V.size_of hs))
)
hh0
hh1;
mt *=
MT (MT?.hash_size mtv)
(MT?.offset mtv)
idx
(MT?.j mtv)
hs
(MT?.rhs_ok mtv)
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2 | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.mt_safe_elts_preserved",
"MerkleTree.Low.__proj__MT__item__hash_size",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__j",
"LowStar.Monotonic.Buffer.loc_buffer",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.unit",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.RVector.as_seq_preserved",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.__proj__MT__item__hs",
"LowStar.RVector.rv_inv_preserved",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.MT",
"MerkleTree.Low.__proj__MT__item__offset",
"MerkleTree.Low.__proj__MT__item__rhs_ok",
"MerkleTree.Low.__proj__MT__item__hash_spec",
"MerkleTree.Low.__proj__MT__item__hash_fun",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rv_loc_elems",
"LowStar.Vector.size_of",
"LowStar.Vector.loc_vector_within",
"LowStar.Vector.loc_vector_within_included",
"LowStar.RVector.rv_loc_elems_included",
"MerkleTree.Low.mt_flush_to_",
"MerkleTree.Low.__proj__MT__item__i",
"FStar.Ghost.hide",
"MerkleTree.Low.index_t",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.b2t",
"Prims.op_Equality",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.split_offset",
"Prims.l_and",
"FStar.UInt32.lte",
"MerkleTree.Low.add64_fits",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.

#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Allocates an empty Merkle tree in (sub-regions of) region `r`:
// the level-hash vector `hs`, the rightmost-hash vector `rhs`, and the
// Merkle root buffer `mroot` each live in their own fresh sub-region,
// so their footprints are pairwise disjoint by construction.
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Level hashes: one (initially empty) hash vector per level.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  // Rightmost hashes: one hash slot per level.
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // Allocating `rhs` does not disturb `hs` (disjoint regions).
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  // The Merkle root buffer.
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Finally, the tree record itself, directly in `r`.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees the whole tree: the level-hash vectors, the rightmost-hash
// vector, the Merkle root buffer, and the tree record itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

private
// Decomposes a point update of a region vector's representation:
// updating index `i` with `v` equals the prefix [0, i), then `v`,
// then the suffix [i+1, size). Proved via sequence-slice lemmas.
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // The update leaves both the prefix and suffix slices unchanged.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Relate `as_seq_sub` to slices of the full `as_seq`.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
//
// The proof proceeds in two phases: (1) build the extended vector `ihv`
// (a copy of `hs[lv]` with `v` snoc'ed), then (2) assign it back into
// `hs` at index `lv`. Each phase re-establishes `rv_inv`, the framing
// facts for untouched levels, and the `mt_safe_elts` tail invariant.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic helper for the even-index insertion case: when `j` is even,
// appending one element does not change the parent index (`j / 2`).
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic helper for the odd-index insertion case: when `j` is odd,
// the parent index increments ((j + 1) / 2 = j / 2 + 1), it stays within
// the bound of the next level up, and the current level is non-empty.
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Rearranges a four-way `loc_union`: (a ∪ b) ∪ (c ∪ d) = (a ∪ c) ∪ (b ∪ d).
// Used below to regroup modified-location footprints.
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
         loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Shows that the footprint modified at level `lv` (its element vector,
// its slot in `hs`, plus `aloc`) unioned with the footprint modified by
// the recursive call on levels `lv+1 ..` equals the footprint claimed by
// `insert_`'s postcondition for levels `lv ..`. Pure `loc` algebra.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both the vector-slot footprint and the element footprint
  // at level `lv` into "head" (level lv) and "tail" (levels lv+1 ..).
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening lemma: `modifies l1` implies `modifies ((l1 ∪ l2) ∪ l3)`,
// via the inclusion of `l1` in the larger union.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// In `snoc s v`, the element at the old last position is `S.last s`.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The full region-vector invariant implies the per-range region facts.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//                     BEFORE INSERTION        AFTER INSERTION
// lv
// 0                   h0 h1 h2          ====> h0 h1 h2 h3
// 1                   h01               ====> h01 h23
// 2                                     ====> h03
//
// Proof structure: after `hash_vv_insert_copy` pushes the accumulator at
// level `lv`, the odd-`j` branch hashes the last two entries into `acc`
// and recurses one level up; the even-`j` branch terminates. Each phase
// re-establishes `rv_inv`, `mt_safe_elts`, and the correspondence with
// the high-level `MTH.insert_` (via `MTH.insert_rec` / `MTH.insert_base`).
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and adding
// one more element does not overflow the 64-bit (offset + index) sum.
// Note: `mt_not_full_nst` must be checked first (short-circuit `&&`) so
// that `MT?.j mtv + 1ul` is known not to overflow.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper over `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt)); // trivial hint for the typechecker
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
//
// Implementation: run `insert_` from level 0, then overwrite the tree
// record with `j` incremented and `rhs_ok` reset to false, framing the
// `rhs` vector and `mroot` across both steps.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // Frame `rhs` and `mroot` across the footprint modified by `insert_`.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Bump `j` and invalidate the cached rightmost hashes.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Updating the record only touches the tree pointer's buffer; frame
  // every component across it.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// `mt_create_custom` additionally takes the hash size and the (erased
// spec of the) hash function; it is `create_empty_mt` + one `mt_insert`.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths

// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.

// A path is a hash size together with a vector of hash pointers
// (borrowed from the tree's level-hash vectors).
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector of the path pointed to by `p` in `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its
// vector are live/freeable, every hash in the path satisfies its
// regional invariant and lives inside the tree region `mtr`, and the
// path's own region is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// The abstract footprint of a path: everything in its pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the sub-sequence [i, j) of a sequence of low-level hashes to a
// high-level path, by structural recursion on `j` (snoc'ing the
// representation of the last element).
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lifts the entire hash vector of `p` to a
// high-level `MTH.path`.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Indexing a lifted sub-path agrees with lifting the indexed element:
// `lift_path_ h hs i j` at position `k - i` equals `r_repr` of `hs[k]`.
// Registered as an SMT pattern so the solver applies it automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: the i-th element of a
// lifted path is the representation of the i-th low-level hash.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Extensionality: two hash sequences that agree (as slices) on [i, j)
// lift to equal paths on [i, j). Proved by pointwise index reasoning
// (the SMT pattern on `lift_path_index_` fires for each assert).
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Frame lemma (sequence form): modifying a location disjoint from the
// tree region `mtr` preserves each path hash's invariant and region
// membership. Induction on `j`, separating the last element each step.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma: `path_safe` is preserved by modifications disjoint from
// both the path footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: no per-element conditions are needed,
// so disjointness from the path footprint alone suffices.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Frame lemma (sequence form) for the path *representation*:
// modifications disjoint from the tree region leave `lift_path_`
// unchanged. Induction on `j`, framing the last element each step.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Frame lemma: the lifted path representation (`lift_path`) is preserved
// by modifications disjoint from the path footprint and the tree region.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates a fresh, empty path in a new sub-region of `r`. The path region
// must be disjoint from the tree region `mtr` so that later tree mutations
// cannot invalidate the path, and vice versa.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Resets a path to the empty path by clearing its hash vector. Note this
// only drops the vector's logical contents; it does not free the hashes,
// which are pointers into the tree.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Frees the path's hash vector and then the path pointer itself. The hashes
// stored in the vector are not freed here — they are owned by the tree.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
// Recursively constructs the rightmost hashes `rhs` for the (incomplete)
// Merkle tree levels starting at `lv`, threading the running root through
// `acc`. `actd` records whether `acc` currently holds a meaningful
// accumulated hash. The postcondition ties the result to the high-level
// specification `MTH.construct_rhs`.
private
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
     HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
     mt_safe_elts #hsz h0 lv hs i j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.loc_rvector rhs)
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 rhs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs i j;
     MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
       (U32.v lv)
       (Rgl?.r_repr (hvvreg hsz) h0 hs)
       (Rgl?.r_repr (hvreg hsz) h0 rhs)
       (U32.v i) (U32.v j)
       (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
     (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
     )))
   (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  // Base case: no elements at this level; nothing changes.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    // Even case: this level contributes nothing; recurse to the next level.
    (if j % 2ul = 0ul
    then begin
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    // Odd case: the last element at this level must be folded into `acc`
    // (and saved into `rhs` if `actd`), then recurse with `actd = true`.
    else begin
      if actd
      then begin
        // Save the current accumulator as the rightmost hash of this level,
        // then combine the level's last element with the accumulator.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                        (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                               (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // No accumulated hash yet: the level's last element becomes the
        // accumulator by a plain copy.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                      (if actd
                      then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                 (Rgl?.r_repr (hreg hsz) hh0 acc)
                      else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
// Non-stateful precondition check for `mt_get_root`. Currently trivially
// true: getting the root has no runtime-checkable precondition beyond the
// type-level ones.
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper around `mt_get_root_pre_nst`: dereferences the tree
// pointer and runs the (trivial) runtime precondition check.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well, caches the result in the tree (`rhs_ok` is set), and writes the
// root into `rt`.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: the cached root is current; just copy it out.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild the rightmost hashes, which leaves the root in `rt`.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root in the tree.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark `rhs` as up to date in the tree record.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
// Appends a hash (a pointer into the tree region `mtr`) to the path `p`.
// Only the path's footprint is modified; the lifted path grows by one
// element, matching `MTH.path_insert`.
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
      (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
      hsz = hsz0 /\ hsz = hsz1 /\
      (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
      S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // `V.insert` may reallocate the vector; re-establish path invariants
  // and the lifted contents across that modification.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
// Number of path hashes (0 or 1) contributed by a single tree level, for
// target index `k` among `j` elements; `actd` indicates whether an
// accumulated rightmost hash exists. Mirrors `MTH.mt_path_length_step`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
// Total Merkle path length for target index `k` among `j` elements starting
// at level `lv`, by summing the per-level step counts up the tree. Matches
// `MTH.mt_path_length` and is bounded by the remaining tree height.
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the number of hashes currently stored in the path `p`.
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
// Adds at most one hash to the path for the tree level `lv`: the sibling of
// the target index `k`, taken from `hs` (or from `rhs` when the sibling is
// the rightmost accumulated hash). Matches `MTH.mt_make_path_step`.
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
     V.size_of (phashes h1 p) <= lv + 2ul /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
        (MTH.mt_make_path_step
          (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
          (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // Odd target: the sibling is the element to the left.
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // Even target: sibling is to the right, unless `k` is at (or just
    // before) the boundary `j`, in which case the rightmost hash (if
    // active) is used, or nothing is added.
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
// Non-stateful precondition for `mt_get_path_step`: the requested index
// must be within the path's hash count.
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper: dereferences the path pointer and checks the index
// bound via `mt_get_path_step_pre_nst`.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the `i`-th hash of the path. The hash is a pointer into the tree
// region; the caller must not outlive the tree with it.
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursively builds the Merkle path for target index `k`, walking levels
// from `lv` upward: one `mt_make_path_step` per level, then recursing with
// halved indices. Matches `MTH.mt_get_path_` on the lifted path.
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) ==
     V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
        (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
          (U32.v i) (U32.v j) (U32.v k) before actd)))))
   (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                    (MTH.mt_make_path_step
                      (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd));
    // Only the path footprint changed; tree invariants carry over to `hh1`.
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    // Recurse one level up; once an odd boundary is crossed, `actd` stays true.
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv + 1)
                      (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                      (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                      (lift_path hh1 mtr p)
                      (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv)
                      (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd)))
#pop-options
// Non-stateful precondition for `mt_get_path`: the requested offset must
// be representable relative to the tree's offset, the path's hash size must
// match the tree's, the (split) index must be within [i, j), and the output
// path must start empty.
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper: dereferences the tree and path pointers and runs
// `mt_get_path_pre_nst`.
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
// Absorption of loc_union: unioning `l2` in a second time is a no-op.
// Used to simplify the modifies clause in `mt_get_path`.
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
// Builds the full Merkle path for offset `idx`: first refreshes the root
// (and rightmost hashes) via `mt_get_root`, inserts the target leaf hash,
// then walks the levels with `mt_get_path_`. Returns the tree's `j` so the
// caller can later verify against the same element count. The path stores
// pointers into the tree, not copies.
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = Ghost.reveal hsz /\
     Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
     mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
     mt_safe h0 mt /\
     path_safe h0 (B.frameOf mt) p /\
     Rgl?.r_inv (hreg hsz) h0 root /\
     HH.disjoint (B.frameOf root) (B.frameOf mt) /\
     HH.disjoint (B.frameOf root) (B.frameOf p)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     let idx = split_offset (MT?.offset mtv0) idx in
     MT?.hash_size mtv0 = Ghost.reveal hsz /\
     MT?.hash_size mtv1 = Ghost.reveal hsz /\
     Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
     Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
     // memory safety
     modifies (loc_union
                (loc_union
                  (mt_loc mt)
                  (B.loc_all_regions_from false (B.frameOf root)))
                (path_loc p))
              h0 h1 /\
     mt_safe h1 mt /\
     path_safe h1 (B.frameOf mt) p /\
     Rgl?.r_inv (hreg hsz) h1 root /\
     V.size_of (phashes h1 p) ==
     1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
     // correctness
     (let sj, sp, srt =
       MTH.mt_get_path
         (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
     sj == U32.v (MT?.j mtv1) /\
     S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
     srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // Ensure the rightmost hashes and root are current before reading levels.
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  // The path starts with the target leaf hash itself.
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                  (MTH.path_insert
                    (lift_path hh1 mtframe p)
                    (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Collect one sibling hash per level.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                     (loc_union
                       (mt_loc ncmt)
                       (B.loc_all_regions_from false (B.frameOf root)))
                     (path_loc p))
                   hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                  (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                    (U32.v i) (U32.v j) (U32.v idx)
                    (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing
// Loc-algebra helper for `mt_flush_to_`'s modifies clause: regrouping the
// per-level element/vector footprints of level `lv` with those of levels
// `lv+1 ..` yields exactly the combined footprint of levels `lv ..`.
private val
mt_flush_to_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (loc_union
            (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
        loc_union
          (RV.rv_loc_elems h hs lv (V.size_of hs))
          (V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\ | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt))) | [] | MerkleTree.Low.mt_flush_to | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.mt_p -> idx: MerkleTree.Low.offset_t -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 73,
"end_line": 2492,
"start_col": 24,
"start_line": 2451
} |
FStar.HyperStack.ST.ST | val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v)))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv) | val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v = | true | null | false | let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
V.forall2_forall_left hh0
hs
0ul
(V.size_of hs)
lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1) (Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0
hs
0ul
(V.size_of hs)
lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1) (Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved (lv + 1ul)
hs
(Ghost.reveal i / 2ul)
(j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv))
hh0
hh1;
RV.rs_loc_elems_elem_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
0
(U32.v (V.size_of hs))
0
(U32.v lv)
(U32.v lv);
RV.rs_loc_elems_parent_disj (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs) 0 (U32.v lv);
RV.rv_elems_inv_preserved hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
0
(U32.v (V.size_of hs))
(U32.v lv + 1)
(U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
(U32.v lv + 1)
(U32.v (V.size_of hs));
RV.rv_elems_inv_preserved hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
RV.assign hs lv ihv;
let hh2 = HST.get () in
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0
hh2);
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved hs
(lv + 1ul)
(V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul))
hh1
hh2;
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved (lv + 1ul)
hs
(Ghost.reveal i / 2ul)
(j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul))
hh1
hh2;
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append (RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv) (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"FStar.Ghost.erased",
"MerkleTree.Low.index_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Ghost.reveal",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt32.v",
"FStar.Integers.op_Subtraction",
"Prims.pow2",
"MerkleTree.Low.uint32_32_max",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.as_seq_sub_upd",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.RVector.as_seq",
"MerkleTree.Low.Datastructures.hreg",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"LowStar.Regional.__proj__Rgl__item__repr",
"FStar.Seq.Base.append",
"LowStar.RVector.as_seq_sub",
"FStar.UInt32.__uint_to_t",
"FStar.Seq.Properties.cons",
"FStar.Integers.op_Plus",
"LowStar.RVector.as_seq_sub_preserved",
"LowStar.RVector.loc_rvector",
"MerkleTree.Low.mt_safe_elts_preserved",
"FStar.Integers.op_Slash",
"LowStar.Vector.loc_vector_within",
"Prims.eq2",
"FStar.UInt32.t",
"LowStar.Vector.get",
"MerkleTree.Low.offset_of",
"LowStar.RVector.rv_loc_elems_preserved",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rs_loc_elem",
"LowStar.Vector.as_seq",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.RVector.assign",
"FStar.Seq.Properties.snoc",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"LowStar.RVector.rv_elems_inv",
"LowStar.RVector.rv_elems_inv_preserved",
"LowStar.RVector.rs_loc_elems_parent_disj",
"LowStar.Vector.frameOf",
"LowStar.RVector.rs_loc_elems_elem_disj",
"LowStar.Vector.loc_vector_within_disjoint",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.forall2_forall_right",
"FStar.Monotonic.HyperHeap.disjoint",
"LowStar.Regional.__proj__Rgl__item__region_of",
"LowStar.Vector.forall2_forall_left",
"LowStar.RVector.rvector",
"LowStar.RVector.insert_copy",
"MerkleTree.Low.Datastructures.hcpy",
"Prims.op_Negation",
"LowStar.Vector.is_full",
"LowStar.Vector.index",
"MerkleTree.Low.mt_safe_elts_rec"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v)))) | [] | MerkleTree.Low.hash_vv_insert_copy | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv < MerkleTree.Low.merkle_tree_size_lg} ->
i: FStar.Ghost.erased MerkleTree.Low.index_t ->
j:
MerkleTree.Low.index_t
{ FStar.Ghost.reveal i <= j && FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv) - 1 &&
j < MerkleTree.Low.uint32_32_max } ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
v: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 46,
"end_line": 563,
"start_col": 42,
"start_line": 466
} |
FStar.HyperStack.ST.ST | val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end | val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
let rec mt_retract_to_ #hsz hs lv i s j = | true | null | false | let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs
then ()
else
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
V.forall2_forall_left hh0
hs
0ul
(V.size_of hs)
lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1) (Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0
hs
0ul
(V.size_of hs)
lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1) (Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved hs
0ul
lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec) (Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0
hh1;
V.forall_preserved hs
(lv + 1ul)
(V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec) (Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0
hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
RV.rv_loc_elems_preserved hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved (lv + 1ul)
hs
(i / 2ul)
(j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv))
hh0
hh1;
RV.rs_loc_elems_elem_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
0
(U32.v (V.size_of hs))
0
(U32.v lv)
(U32.v lv);
RV.rs_loc_elems_parent_disj (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs) 0 (U32.v lv);
RV.rv_elems_inv_preserved hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
0
(U32.v (V.size_of hs))
(U32.v lv + 1)
(U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
(U32.v lv + 1)
(U32.v (V.size_of hs));
RV.rv_elems_inv_preserved hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get () in
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0
hh2);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved hs
(lv + 1ul)
(V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul))
hh1
hh2;
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved (lv + 1ul)
hs
(i / 2ul)
(j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul))
hh1
hh2;
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append (RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted) (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))
));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs
then
(assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
let hh3 = HST.get () in
assert (modifies (loc_union (loc_union (RV.rs_loc_elem (hvreg hsz)
(V.as_seq hh0 hs)
(U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0
hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint (V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs
lv
(loc_union (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2
hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs)
(U32.v lv + 1)
(U32.v i / 2)
(U32.v s / 2)
(U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv) (U32.v i) (U32.v s) (U32.v j))))
else
let hh3 = HST.get () in
assert ((modifies (loc_union (RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0
hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv) (U32.v i) (U32.v s) (U32.v j))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.b2t",
"Prims.op_Equality",
"LowStar.Vector.uint32_t",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.merkle_tree_size_lg",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.index_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"FStar.UInt32.v",
"FStar.Integers.op_Greater_Equals",
"Prims.unit",
"Prims.bool",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"Prims._assert",
"FStar.Seq.Base.equal",
"LowStar.Regional.__proj__Rgl__item__repr",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.RVector.as_seq",
"MerkleTree.New.High.mt_retract_to_",
"MerkleTree.Low.mt_safe_elts_spec",
"MerkleTree.Low.mt_safe_elts",
"LowStar.RVector.rv_inv",
"Prims.l_imp",
"FStar.Seq.Base.length",
"FStar.Integers.op_Slash",
"MerkleTree.Low.mt_safe_elts_constr",
"Prims.eq2",
"FStar.UInt32.t",
"MerkleTree.Low.Datastructures.hash",
"LowStar.Vector.get",
"MerkleTree.Low.offset_of",
"LowStar.Vector.get_preserved",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rv_loc_elems",
"LowStar.Vector.loc_vector_within",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.RVector.rv_loc_elems_included",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.loc_vector_within_disjoint",
"MerkleTree.Low.mt_flush_to_modifies_rec_helper",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.RVector.rs_loc_elem",
"LowStar.Vector.as_seq",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.mt_retract_to_",
"Prims.l_and",
"MerkleTree.Low.as_seq_sub_upd",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.append",
"LowStar.RVector.as_seq_sub",
"FStar.Seq.Properties.cons",
"LowStar.RVector.as_seq_sub_preserved",
"LowStar.RVector.loc_rvector",
"MerkleTree.Low.mt_safe_elts_preserved",
"LowStar.RVector.rv_loc_elems_preserved",
"LowStar.RVector.assign",
"FStar.Seq.Base.slice",
"LowStar.RVector.elems_reg",
"LowStar.RVector.rv_itself_inv",
"LowStar.RVector.rv_elems_inv",
"LowStar.RVector.rv_elems_inv_preserved",
"LowStar.RVector.rs_loc_elems_parent_disj",
"LowStar.Vector.frameOf",
"LowStar.RVector.rs_loc_elems_elem_disj",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Regional.__proj__Rgl__item__region_of",
"LowStar.Vector.forall_preserved",
"FStar.Monotonic.HyperHeap.disjoint",
"LowStar.Vector.forall2_forall_right",
"LowStar.Vector.forall2_forall_left",
"LowStar.RVector.rvector",
"LowStar.RVector.shrink",
"FStar.Integers.int_t",
"LowStar.Vector.index",
"MerkleTree.Low.mt_safe_elts_rec"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
// Zero-cost casts between 32- and 64-bit machine integers.
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32

private inline_for_extraction
// `offsets_connect x y`: y lies in [x, x + 2^32 - 1], hence (y - x) fits
// in 32 bits and the two offsets can be related by a 32-bit index.
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit

private inline_for_extraction
// Convert a global 64-bit offset `index` into a 32-bit index relative to the
// tree's base offset `tree`; the refinement guarantees the subtraction fits.
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
  [@inline_let] let diff = U64.sub_mod index tree in
  assert (diff <= offset_range_limit);
  Int.Cast.uint64_to_uint32 diff

private inline_for_extraction
// True when x + i does not overflow a 64-bit offset.
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)

private inline_for_extraction
// Rebase a 32-bit index `i` onto the 64-bit base offset `tree`; the result
// provably "connects" back to `tree` (see `offsets_connect`).
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
  U64.add tree (u32_64 i)
// Log2 of the tree's maximum capacity: 32 levels of hash vectors, supporting
// up to 2^32 - 1 leaves (cf. `uint32_32_max` and `mt_not_full_nst`).
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
//        calculate some merkle paths that need the rightmost hashes
//        as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
//          root of the tree. If `rhs_ok` is true then it has the up-to-date
//          root value.
// `hash_spec`/`hash_fun`: the (ghost) functional specification of the hash
//          function, and the Low* implementation refined against it.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree

// A heap pointer to a Merkle tree, and its read-only counterpart.
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
// Boolean (runtime-checkable) version of the `merkle_tree` refinements:
// index ordering, offset-overflow fit, and the fixed sizes of `hs` and `rhs`.
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
  j >= i && add64_fits offset j &&
  V.size_of hs = merkle_tree_size_lg &&
  V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max

// Heap-level variant: dereferences the tree pointer first.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)

/// (Memory) Safety

// Round an index down to the even member of its node pair; level vectors
// store elements starting from `offset_of i` (see `mt_safe_elts`).
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
  if lv = merkle_tree_size_lg then true
  // The level-`lv` vector holds exactly `j - offset_of i` elements, and the
  // invariant holds recursively one level up with both indices halved.
  else (let ofs = offset_of i in
       V.size_of (V.get h hs lv) == j - ofs /\
       mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction lemma: fold one step of the `mt_safe_elts` recursion.
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()

// Elimination lemma: project the size fact for level `lv` out of the invariant.
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()

// Elimination lemma: unfold the invariant to the next level (indices halved).
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// If every level vector from `lv` upward is empty, the invariant holds for
// the empty range i = j = 0 (base case used right after tree allocation).
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                    (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` survives any heap modification disjoint from
// the vector structure; registered as an SMT pattern so Z3 applies it
// automatically whenever the four triggers are all in scope.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: hs, rhs and mroot each live in their own sub-region of the
  // tree's region, pairwise disjoint from one another.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))

// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the whole-tree invariant: modifications disjoint from
// `mt_loc mt` preserve both the stored record and `mt_safe`.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // `mt_loc` covers the tree pointer itself plus hs, rhs and mroot, so
  // disjointness from `mt_loc` frames each component individually.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// The low-level element invariant entails the high-level well-formedness
// predicate `MTH.hs_wf_elts` on the lifted (sequence-of-sequences) store.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Lift a low-level tree record to its high-level (ghost) counterpart, used
// to state functional-correctness postconditions against MerkleTree.New.High.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))

// Same lifting, starting from a safe tree pointer instead of a record.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Framing for the lifted view: a modification disjoint from the tree leaves
// its high-level lifting unchanged (strengthens `mt_safe_preserved`).
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
           (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
    (requires (fun _ -> true))
    (ensures (fun h0 mt h1 ->
      let dmt = B.get h1 mt 0 in
      // memory safety
      B.frameOf mt = r /\
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      mt_not_full h1 mt /\
      // correctness
      MT?.hash_size dmt = hash_size /\
      MT?.offset dmt = 0UL /\
      merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Allocate `hs`, `rhs` and `mroot` in three fresh sub-regions of `r`,
  // establishing the pairwise disjointness demanded by `mt_safe`; after
  // each allocation, re-establish the invariants of the earlier structures.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Allocate the tree record itself: empty range (i = j = 0), stale `rhs`.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Free every component of the tree (hs, rhs, mroot) and then the record
// itself; requires the full `mt_safe` invariant so each free is legal.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence algebra helper: updating index `i` of the lifted rvector equals
// (prefix before i) ++ (new value) ++ (suffix after i).
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                 (S.append
                   (RV.as_seq_sub h rv 0ul i)
                   (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// The numbered proof steps below re-establish, after each mutation, the
// rvector invariant, the framing of untouched levels, and `mt_safe_elts`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (V.frameOf hs) (B.frameOf v) /\
      mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 v /\
      V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
      V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
      mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
      RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
      RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
      // correctness
      (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.hashess_insert
                (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
      S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
              (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                      (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper for the even-index case of `insert_`: inserting at an
// even `j` does not change the parent-level index (j / 2 = (j + 1) / 2).
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()

#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper for the odd-index case: the parent-level index advances
// by one, stays in range, and the level-`lv` vector is non-empty.
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rebracketing lemma for four-way location unions:
// (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d); used to normalize `modifies`
// footprints when combining per-level and recursive-call locations.
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for `insert_`: the union of the level-`lv` footprint,
// the recursive (levels > lv) footprint, and the accumulator's region
// collapses to the whole-range footprint from `lv` upward plus `aloc`.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split both whole-range footprints into head (level lv) and tail parts.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: a modification within `l1` is also within (l1 ∪ l2) ∪ l3.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)

// After a snoc, the element at the old last position is the old last element.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()

// `rv_inv` entails the regionality part of the invariant for any sub-range.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//    BEFORE INSERTION               AFTER INSERTION
// lv
// 0  h0  h1  h2        ====>    h0  h1  h2  h3
// 1  h01                        h01 h23
// 2                             h03
//
// Odd `j`: the new element completes a pair, so `acc` is updated with the
// compressed hash and insertion recurses one level up. Even `j`: recursion
// stops. The numbered steps mirror `hash_vv_insert_copy`'s proof structure.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      Rgl?.r_inv (hreg hsz) h0 acc /\
      HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
      mt_safe_elts h0 lv hs (Ghost.reveal i) j))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (loc_union
                   (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                   (V.loc_vector_within hs lv (V.size_of hs)))
                 (B.loc_all_regions_from false (B.frameOf acc)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      Rgl?.r_inv (hreg hsz) h1 acc /\
      mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
      S.equal (RV.as_seq h1 hs)
              (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Precondition of insertion: the tree is not full and bumping `j` by one
// still fits the 64-bit offset.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)

// Public, stateful wrapper over `mt_insert_pre_nst` on a const tree pointer.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      let dmt = B.get h0 mt 0 in
      mt_safe h0 mt /\
      Rgl?.r_inv (hreg hsz) h0 v /\
      HH.disjoint (B.frameOf mt) (B.frameOf v) /\
      MT?.hash_size dmt = Ghost.reveal hsz /\
      mt_insert_pre_nst dmt v))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (mt_loc mt)
                 (B.loc_all_regions_from false (B.frameOf v)))
               h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
      mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Do the recursive insertion starting from level 0.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // Frame `rhs` and `mroot` across the insertion's footprint (hs + v's region).
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Write back the record with `j` incremented and `rhs_ok` invalidated.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Frame all components across the record update (touches only `mt` itself).
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness: refines the high-level `MTH.mt_create`
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
// Implementation: allocate an empty tree in region `r`, then insert `init`.
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: a vector of hashes of a fixed hash size. The hashes are
// borrowed from the tree's regions (see `path_safe`), not owned by the path.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
// Memory-safety invariant for a path w.r.t. a Merkle tree region `mtr`:
// the path pointer and its vector are live/freeable, every stored hash is
// valid and lives inside `mtr`, and the path's own region is disjoint from
// `mtr` (the path only borrows the hashes).
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Abstract footprint of a path: everything allocated under its frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lift a slice [i, j) of a sequence of low-level hashes to a high-level
// (specification) path, reading each hash's value in memory `h`.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
// Recurses on `j`, snoc-ing the representation of the last element.
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the entire hash vector of a safe path.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th lifted element is the
// representation of the k-th low-level hash. Registered as an SMT pattern so
// the solver can use it automatically.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on `j`, peeling off the last element until `k` is reached.
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Corollary of `lift_path_index_` stated directly on a path pointer: reading
// slot `i` of the path and lifting commute.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence for `lift_path_`: two hash sequences that agree on [i, j) lift
// to equal high-level paths.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
// Proof: spell out the pointwise characterization on both sides (triggering
// the `lift_path_index_` SMT pattern) and re-index so the solver can connect
// the slice equality with elementwise equality of the lifted sequences.
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Frame lemma (sequence form): the per-element validity/region conditions of
// a path's hashes survive a modification disjoint from the tree region `mtr`.
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
// Induction on `j`: each hash lives under `mtr`, so `dl` is disjoint from it
// and `Rgl?.r_sep` carries its invariant across the modification.
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Frame lemma: `path_safe` survives any modification disjoint from both the
// path's footprint and the tree region.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case of path framing for an empty path: since there are no stored
// hashes, disjointness from `mtr` is not needed.
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Frame lemma (sequence form) for the path's *representation*: a disjoint
// modification does not change the lifted high-level path.
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on `j`, mirroring `path_safe_preserved_`; `Rgl?.r_sep` gives
// value preservation of the last element.
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Frame lemma on a path pointer: both safety and the lifted representation
// are unchanged by a modification disjoint from the path and the tree.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocate a fresh, empty path in region `r` (disjoint from the tree region
// `mtr`); its hash vector lives in a new sub-region of `r`.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness: the new path lifts to the empty sequence
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to empty (the vector is cleared, not freed; the borrowed
// hashes themselves are untouched).
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Free a path: deallocates the vector and the pointer (not the borrowed
// hashes, which belong to the tree).
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Build the "rightmost hashes" (`rhs`) for an incomplete tree level by level,
// starting at level `lv`, while folding the Merkle root into `acc`.
// `actd` records whether `acc` already holds an active partial hash.
// Refines the high-level `MTH.construct_rhs`.
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety: only `rhs` and `acc`'s region are modified
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: no elements at this level; nothing to do, result is (rhs, acc).
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even number of elements at this level: nothing rightmost here, recurse up.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd number of elements: the last element at this level is rightmost.
else begin
if actd
then begin
// `acc` is active: store it in `rhs` at this level, then combine it with
// the last element via `hash_fun` into `acc`.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// `acc` not yet active: copy the last element at this level into `acc`.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
// Summarize the state after the odd-step update, then recurse one level up
// with `actd = true` (the accumulator is now active in both branches).
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
// Getting the root has no non-trivial pure precondition.
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper: dereference the const tree pointer and check the pure
// precondition (currently always true).
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
// Trivial assertion relating the runtime hash size to the erased index.
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness: refines the high-level `MTH.mt_get_root`
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
// Fast path: rightmost hashes are up to date, so the cached root is valid;
// just copy it out.
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
// Slow path: recompute the rightmost hashes and the root via `construct_rhs`.
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly-computed root in `mroot`.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Mark `rhs` as up to date in the tree record.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety: the record update only touched the tree pointer's buffer
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
// Append a (borrowed) hash from the tree region to a path. Refines
// `MTH.path_insert`.
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness: the lifted path gains exactly `hp`'s value at the end
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// `V.insert` may reallocate the vector; re-establish path invariants across
// that modification (it is disjoint from the tree region `mtr`).
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
// Re-establish invariants across the record write (touches only `p`'s frame).
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
// correctness: the new lifted path equals the old one with `hp` snoc-ed.
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
// How many hashes one level contributes to a Merkle path: 0 or 1, mirroring
// `MTH.mt_path_length_step` (the refinement type pins the exact value).
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
// Total Merkle path length from level `lv` upward, for target index `k`,
// element count `j`, and rightmost-hash flag `actd`. Refines
// `MTH.mt_path_length`; bounded by the number of remaining levels.
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
// Sum the per-level step counts, halving `k` and `j` at each level; `actd`
// becomes true once an odd level leaves a rightmost hash behind.
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Runtime accessor: the number of hashes currently stored in a path.
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
// One step of path construction at level `lv`: append the sibling of index
// `k` (from `hs`, or from `rhs` when the sibling is a rightmost hash).
// Refines `MTH.mt_make_path_step`.
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
// `k` odd: its sibling is the element to the left in `hs`.
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
// `k` even: sibling is to the right — absent (k = j), a rightmost hash
// (k + 1 = j with `actd`), or a regular element of `hs`.
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
private inline_for_extraction
// Pure precondition for reading path slot `i`: the index is in bounds.
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
// Stateful wrapper around `mt_get_path_step_pre_nst` on a const path pointer.
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
// Read the `i`-th hash of a path (a borrowed pointer into the tree region).
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
// Build the full path for index `k` by iterating `mt_make_path_step` over
// every level from `lv` up. Refines `MTH.mt_get_path_`.
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
// Append this level's sibling, re-frame the tree invariants (only the path
// was modified), then recurse to the next level with halved indices.
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd)))
#pop-options
(* Pure (non-stateful) precondition check for [mt_get_path]: the requested
   offset must be joinable with the tree's base offset, the path's hash size
   must match the tree's, the split index must fall in the live range
   [MT?.i, MT?.j), and the output path must start out empty. *)
private inline_for_extraction
val mt_get_path_pre_nst:
mtv:merkle_tree ->
idx:offset_t ->
p:path ->
root:(hash #(MT?.hash_size mtv)) ->
Tot bool
let mt_get_path_pre_nst mtv idx p root =
offsets_connect (MT?.offset mtv) idx &&
Path?.hash_size p = MT?.hash_size mtv &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
MT?.i mtv <= idx && idx < MT?.j mtv &&
V.size_of (Path?.hashes p) = 0ul)
(* Stateful wrapper around [mt_get_path_pre_nst]: dereferences the const
   tree and path pointers and runs the pure precondition check.  Intended
   for callers that must validate arguments before calling [mt_get_path]. *)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
(* Trivial location-algebra lemma: unioning [l2] into a union that already
   contains [l2] is a no-op.  Used below to normalize the modifies-clause
   of [mt_get_path]; discharged automatically by the SMT solver. *)
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
(* Top-level Merkle-path construction.  First (re)computes the root so the
   rightmost hashes [rhs] are valid, pushes the leaf hash for [idx] onto the
   path, then delegates to the level-by-level worker [mt_get_path_].
   Returns the tree's [j] (number of leaves), which the verifier needs.
   The postcondition relates the low-level result to the high-level
   [MTH.mt_get_path] spec.  Note: the path stores pointers into the tree,
   not copies (see the comment preceding this block in the file). *)
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
// Ensure the rightmost-hash cache is up to date before building the path.
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
// Translate the external offset into an internal leaf index.
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
// The first path element is the leaf hash itself.
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
// Build the rest of the path, level by level, starting at level 0.
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
(* Location-algebra lemma used by [mt_flush_to_]'s recursion: the modifies
   footprint of "flush level [lv], then recurse on levels lv+1.." equals the
   footprint of flushing levels [lv..] in one go.  Proven by splitting the
   vector/element locations at [lv] and re-associating the unions. *)
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
// Split the within-vector location at lv, and the element locations at lv.
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Re-associate the four-way union into the goal's shape.
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
(* Recursive worker for flushing: at level [lv], drop the hashes between the
   old start index [pi] and the new start index [i] from the level's vector,
   then recurse one level up with halved indices.  [j] is ghost (only needed
   for the invariants/spec).  Matches the high-level [MTH.mt_flush_to_]. *)
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
// If the level offsets coincide there is nothing to drop at any level.
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
(* Pure precondition check for [mt_flush_to]: the external offset must be
   joinable with the tree's base offset, and the resulting internal index
   must lie in the live range [MT?.i, MT?.j). *)
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
(* Stateful wrapper around [mt_flush_to_pre_nst]: dereferences the const
   tree pointer and runs the pure precondition check. *)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
(* Flush the tree up to (external) offset [idx]: drops old base hashes by
   calling the level-wise worker [mt_flush_to_], then rewrites the tree
   record with the new start index.  The remaining proof obligations show
   that the rhs vector, the cached root, and the element invariants are
   untouched by the flush and the record update. *)
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
// Drop old hashes at every level, starting from level 0.
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
// The flush only touches `hs`; rhs and mroot are preserved.
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
// Update the tree record: the start index becomes `idx`.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
// Writing the record buffer itself disturbs none of the referenced vectors.
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
(* Pure precondition for [mt_flush]: the tree must contain at least one
   live leaf (j > i), so flushing to the last leaf is well-defined. *)
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
(* Stateful wrapper around [mt_flush_pre_nst] on a const tree pointer. *)
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
(* Flush everything except the most recent leaf: computes the external
   offset of index j-1 and delegates to [mt_flush_to].  At least one base
   hash always remains after flushing (see the comment above [mt_flush_to]). *)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
// Overflow-freedom facts needed to join the 64-bit offset with j-1.
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo
#pop-options
/// Retraction
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1" | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv)) | [
"recursion"
] | MerkleTree.Low.mt_retract_to_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
lv: LowStar.Vector.uint32_t{lv < LowStar.Vector.size_of hs} ->
i: MerkleTree.Low.index_t ->
s: MerkleTree.Low.index_t ->
j:
MerkleTree.Low.index_t
{ i <= s && s <= j &&
Lib.IntTypes.v j <
Prims.pow2 (FStar.UInt32.v (LowStar.Vector.size_of hs) - Lib.IntTypes.v lv) }
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 5,
"end_line": 2741,
"start_col": 41,
"start_line": 2561
} |
FStar.HyperStack.ST.ST | val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt)))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end | val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
let mt_get_root #hsz mt rt = | true | null | false | let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then
(Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt)))
else
(construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved hs
(loc_union (RV.loc_rvector rhs) (B.loc_all_regions_from false (B.frameOf rt)))
hh0
hh1;
RV.as_seq_preserved hs
(loc_union (RV.loc_rvector rhs) (B.loc_all_regions_from false (B.frameOf rt)))
hh0
hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul
hs
i
j
(loc_union (RV.loc_rvector rhs) (B.loc_all_regions_from false (B.frameOf rt)))
hh0
hh1;
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size)
#hash_spec
0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt)
false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
RV.rv_inv_preserved hs (B.loc_all_regions_from false (B.frameOf mroot)) hh1 hh2;
RV.rv_inv_preserved rhs (B.loc_all_regions_from false (B.frameOf mroot)) hh1 hh2;
RV.as_seq_preserved hs (B.loc_all_regions_from false (B.frameOf mroot)) hh1 hh2;
RV.as_seq_preserved rhs (B.loc_all_regions_from false (B.frameOf mroot)) hh1 hh2;
B.modifies_buffer_elim rt (B.loc_all_regions_from false (B.frameOf mroot)) hh1 hh2;
mt_safe_elts_preserved 0ul hs i j (B.loc_all_regions_from false (B.frameOf mroot)) hh1 hh2;
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j (B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
MTH.mt_get_root_rhs_ok_false (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i)
(U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"MerkleTree.Low.__proj__MT__item__rhs_ok",
"Prims._assert",
"Prims.eq2",
"FStar.Pervasives.Native.tuple2",
"MerkleTree.New.High.merkle_tree",
"FStar.UInt32.v",
"MerkleTree.Low.__proj__MT__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.New.High.hash",
"MerkleTree.New.High.mt_get_root",
"MerkleTree.Low.mt_lift",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"MerkleTree.New.High.mt_get_root_rhs_ok_true",
"MerkleTree.Low.mt_preserved",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Regional.__proj__Rgl__item__region_of",
"MerkleTree.Low.mt_safe_preserved",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"LowStar.RVector.__proj__Cpy__item__copy",
"MerkleTree.Low.Datastructures.hcpy",
"Prims.bool",
"MerkleTree.New.High.MT",
"LowStar.RVector.as_seq",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Spec.hash_fun_t",
"MerkleTree.New.High.mt_get_root_rhs_ok_false",
"MerkleTree.Low.mt_safe",
"MerkleTree.Low.mt_safe_elts_preserved",
"FStar.UInt32.__uint_to_t",
"LowStar.Monotonic.Buffer.loc_buffer",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"LowStar.RVector.as_seq_preserved",
"LowStar.RVector.rv_inv_preserved",
"LowStar.BufferOps.op_Star_Equals",
"MerkleTree.Low.MT",
"LowStar.Regional.__proj__Rgl__item__repr",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Monotonic.Buffer.modifies_buffer_elim",
"MerkleTree.New.High.hashes",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"MerkleTree.New.High.construct_rhs",
"LowStar.Regional.regional",
"MerkleTree.Low.Datastructures.hash_vv",
"MerkleTree.Low.Datastructures.hvvreg",
"MerkleTree.Low.mt_safe_elts_spec",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.loc_rvector",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.size_of",
"LowStar.Monotonic.Buffer.live",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"LowStar.RVector.rv_inv",
"MerkleTree.Low.construct_rhs",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"MerkleTree.Low.__proj__MT__item__hash_spec",
"MerkleTree.Low.__proj__MT__item__hash_fun",
"MerkleTree.Low.__proj__MT__item__mroot",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.__proj__MT__item__hs",
"MerkleTree.Low.index_t",
"Prims.l_and",
"FStar.UInt32.lte",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.add64_fits",
"MerkleTree.Low.__proj__MT__item__offset",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.offset_t",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt)))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 150,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt)))) | [] | MerkleTree.Low.mt_get_root | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.const_mt_p -> rt: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 5,
"end_line": 1682,
"start_col": 28,
"start_line": 1571
} |
FStar.HyperStack.ST.ST | val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
(U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
(U32.v i) (U32.v j) (U32.v k)
(lift_path hh0 mtr p) actd))) | val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd = | true | null | false | let hh0 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
let ofs = offset_of i in
if j = 0ul
then ()
else
(mt_make_path_step lv mtr hs rhs i j k p actd;
let hh1 = HST.get () in
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (lift_path hh1 mtr p)
(MTH.mt_make_path_step (U32.v lv)
(RV.as_seq hh0 hs)
(RV.as_seq hh0 rhs)
(U32.v i)
(U32.v j)
(U32.v k)
(lift_path hh0 mtr p)
actd));
RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
RV.as_seq_preserved hs (path_loc p) hh0 hh1;
RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
assert (mt_safe_elts hh1 lv hs i j);
mt_safe_elts_rec hh1 lv hs i j;
mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_get_path_ (lv + 1ul)
mtr
hs
rhs
(i / 2ul)
(j / 2ul)
(k / 2ul)
p
(if j % 2ul = 0ul then actd else true);
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv + 1)
(RV.as_seq hh1 hs)
(RV.as_seq hh1 rhs)
(U32.v i / 2)
(U32.v j / 2)
(U32.v k / 2)
(lift_path hh1 mtr p)
(if U32.v j % 2 = 0 then actd else true)));
assert (S.equal (lift_path hh2 mtr p)
(MTH.mt_get_path_ (U32.v lv)
(RV.as_seq hh0 hs)
(RV.as_seq hh0 rhs)
(U32.v i)
(U32.v j)
(U32.v k)
(lift_path hh0 mtr p)
actd))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.index_t",
"Prims.l_and",
"FStar.Integers.op_Less",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt32.v",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"Prims.op_AmpAmp",
"MerkleTree.Low.path_p",
"Prims.bool",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"MerkleTree.New.High.hash",
"MerkleTree.Low.lift_path",
"MerkleTree.New.High.mt_get_path_",
"LowStar.RVector.as_seq",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Integers.op_Plus",
"FStar.Integers.op_Slash",
"Prims.int",
"FStar.Integers.op_Percent",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.mt_get_path_",
"MerkleTree.Low.mt_safe_elts_spec",
"MerkleTree.Low.mt_safe_elts_rec",
"MerkleTree.Low.mt_safe_elts",
"MerkleTree.Low.mt_safe_elts_preserved",
"MerkleTree.Low.path_loc",
"LowStar.Vector.loc_vector_within_included",
"LowStar.RVector.as_seq_preserved",
"LowStar.RVector.rv_inv_preserved",
"MerkleTree.New.High.mt_make_path_step",
"MerkleTree.Low.mt_make_path_step",
"MerkleTree.Low.offset_of"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
// Footprint lemma for one recursive step of `insert_`: the union of
//  - the head level `lv` (its hash region and its slot in the vector) plus
//    an extra location `aloc` (the accumulator's regions), and
//  - the tail levels `[lv+1, size)` plus `aloc`
// equals the footprint over the whole range `[lv, size)` plus `aloc`.
// This lets the recursive call's `modifies` clause be folded into the
// caller's `modifies` clause.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the vector-slot footprint at lv+1 ...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  // ... and split the element footprint the same way.
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening for `modifies` clauses: if only `l1` was modified, then a
// fortiori the larger footprint (l1 ∪ l2) ∪ l3 was modified. Used in the
// base (even-index) case of `insert_` to match the stated postcondition.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// After snoc-ing `v` onto `s`, the element at the old last position
// (S.length s - 1) is still the old last element of `s`. Discharged by SMT.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The rvector invariant implies the elements-in-region property for any
// sub-range [i, j). Follows directly from `rv_inv`; discharged by SMT.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Low-level insertion worker (see the diagram above). Appends the current
// accumulator `acc` to level `lv`; when the level index `j` is odd, the two
// rightmost hashes of the level are compressed with `hash_fun` into `acc`
// and insertion recurses at level `lv + 1` with index `j / 2`.
// The postcondition ties the result to the high-level spec `MTH.insert_`.
// NOTE: `acc` is mutated in place (it serves as the running accumulator).
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
  (requires (fun h0 ->
    RV.rv_inv h0 hs /\
    Rgl?.r_inv (hreg hsz) h0 acc /\
    HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
    mt_safe_elts h0 lv hs (Ghost.reveal i) j))
  (ensures (fun h0 _ h1 ->
    // memory safety: only levels [lv, size) of `hs` and the accumulator's
    // regions are touched.
    modifies (loc_union
               (loc_union
                 (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                 (V.loc_vector_within hs lv (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             h0 h1 /\
    RV.rv_inv h1 hs /\
    Rgl?.r_inv (hreg hsz) h1 acc /\
    mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
    // correctness
    (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
    S.equal (RV.as_seq h1 hs)
            (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
              (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
  (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // 1) Append `acc` to level `lv` (copying semantics).
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       // Compress the second-to-last hash of the level with `acc` in place.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                      ((Ghost.reveal hash_spec)
                        (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                        (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                        (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // Even index: no compression needed, no recursion; the append alone
       // establishes the postcondition.
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Pure (no-heap-access) insertion precondition: the tree is not full, and
// the incremented leaf count still fits when combined with the 64-bit offset.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper of `mt_insert_pre_nst`: dereferences the const tree
// pointer and checks the runtime insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  // Trivial equality; helps the typechecker relate the dereferenced tree's
  // hash size to the implicit `hsz` expected by `v`.
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Insert hash `v` as a new leaf. `v` is consumed as the accumulator of
// `insert_`, so its contents are clobbered. The tree record is rewritten
// with `j+1` leaves and `rhs_ok = false` (rightmost hashes become stale).
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
  (requires (fun h0 ->
    let dmt = B.get h0 mt 0 in
    mt_safe h0 mt /\
    Rgl?.r_inv (hreg hsz) h0 v /\
    HH.disjoint (B.frameOf mt) (B.frameOf v) /\
    MT?.hash_size dmt = Ghost.reveal hsz /\
    mt_insert_pre_nst dmt v))
  (ensures (fun h0 _ h1 ->
    // memory safety
    modifies (loc_union
               (mt_loc mt)
               (B.loc_all_regions_from false (B.frameOf v)))
             h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
    mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Do the actual insertion starting from level 0.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` are disjoint from everything `insert_` touched, so
  // their invariants and representations are preserved.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Rewrite the tree record with the incremented leaf count.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Updating the tree pointer itself does not disturb the hash structures.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Create a Merkle tree in region `r` with one initial leaf `init`, using the
// supplied hash function (and its ghost spec). `init` is consumed as the
// insertion accumulator, so its contents are clobbered.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
  (requires (fun h0 ->
    Rgl?.r_inv (hreg hsz) h0 init /\
    HH.disjoint r (B.frameOf init)))
  (ensures (fun h0 mt h1 ->
    // memory safety
    modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
    mt_safe h1 mt /\
    // correctness
    MT?.hash_size (B.get h1 mt 0) = hsz /\
    mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  // NOTE(review): hh0/hh2 are not referenced below; they appear to name the
  // pre/post heaps for the SMT encoding — confirm before removing.
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A `path` packages a hash size together with a vector of hashes: the
// sibling hashes needed to recompute the Merkle root from a single leaf.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
// Mutable pointer to a path, and its read-only (const) counterpart.
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in path `p` in heap `h`, typed at
// the path's own (dynamically stored) hash size.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the path pointer and its vector
// are live and freeable; every hash element is valid and lives inside the
// tree region `mtr`; and the path's own region is disjoint from `mtr`
// (so modifying the path never disturbs the tree, and vice versa).
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Footprint of a path: all regions allocated under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lift a sub-range [i, j) of a low-level hash sequence to the high-level
// path representation by reading each hash's pure value (`r_repr`).
// Recurses on `j`, snoc-ing from the left so index order is preserved.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole stored hash vector to the
// high-level `MTH.path` spec type.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Indexing commutes with lifting: the k-th element of the lifted range is
// the pure value of the k-th low-level hash. Registered as an SMT pattern
// so later proofs get it automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level corollary of `lift_path_index_`: reading the i-th stored hash
// agrees with indexing the lifted path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Congruence: if two low-level hash sequences agree (as slices) on [i, j),
// their lifted paths over [i, j) are equal. Proved by surfacing pointwise
// equalities for the SMT solver to combine (via the `lift_path_index_`
// pattern).
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Framing: modifying a location `dl` disjoint from the tree region `mtr`
// preserves validity and region-containment of every path hash in [i, j)
// (all of them live inside `mtr`). Recurses over the range.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Framing for the whole path invariant: `path_safe` survives any
// modification disjoint from both the path's and the tree's footprints.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: with no elements to preserve, disjointness
// from the path footprint alone suffices (no disjointness from `mtr` needed).
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Framing for the lifted representation: under the same disjointness as
// `path_safe_preserved_`, the lifted values of the hashes in [i, j) are
// unchanged as well (each hash's `r_repr` is stable via `r_sep`).
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level framing: modifications disjoint from both footprints leave the
// stored hash size and the whole lifted path unchanged.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocate a fresh, empty path in region `r` (disjoint from the tree region
// `mtr`). The hash vector lives in a new sub-region of `r`.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
  (requires (fun h0 -> HH.disjoint mtr r))
  (ensures (fun h0 p h1 ->
    // memory safety
    path_safe h1 mtr p /\
    // correctness
    Path?.hash_size (B.get h1 p 0) = hsz /\
    S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to empty. Only the stored size drops to zero; the hash
// buffers themselves are not deallocated (V.clear keeps the allocation).
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
  (requires (fun h0 -> path_safe h0 mtr p))
  (ensures (fun h0 _ h1 ->
    // memory safety
    path_safe h1 mtr p /\
    // correctness
    V.size_of (phashes h1 p) = 0ul /\
    S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocate a path: frees the hash vector's spine and then the path
// pointer itself. Note the element hashes belong to the tree, not the path,
// so they are not freed here.
val free_path:
  p:path_p ->
  HST.ST unit
  (requires (fun h0 ->
    B.live h0 p /\ B.freeable p /\
    V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
    HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
  (ensures (fun h0 _ h1 ->
    modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Build the "rightmost hashes" for an incomplete tree, accumulating the
// Merkle root into `acc`. At each level: if `j` is even there is nothing to
// fold at this level; if `j` is odd, the rightmost hash is folded into `acc`
// (hashed with it when `actd`, copied into it otherwise), and when `actd`
// the previous accumulator is first stored into `rhs` at this level.
// Postcondition ties the result to the pure spec `MTH.construct_rhs`.
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
  (requires (fun h0 ->
    RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
    HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
    Rgl?.r_inv (hreg hsz) h0 acc /\
    HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
    HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
    mt_safe_elts #hsz h0 lv hs i j))
  (ensures (fun h0 _ h1 ->
    // memory safety: only `rhs` and the accumulator's regions are written.
    modifies (loc_union
               (RV.loc_rvector rhs)
               (B.loc_all_regions_from false (B.frameOf acc)))
             h0 h1 /\
    RV.rv_inv h1 rhs /\
    Rgl?.r_inv (hreg hsz) h1 acc /\
    // correctness
    (mt_safe_elts_spec #hsz h0 lv hs i j;
    MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
      (U32.v lv)
      (Rgl?.r_repr (hvvreg hsz) h0 hs)
      (Rgl?.r_repr (hvreg hsz) h0 rhs)
      (U32.v i) (U32.v j)
      (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
    (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
    )))
  (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in

  // Base case: empty level range — nothing to do; the spec returns
  // (rhs, acc) unchanged.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    (if j % 2ul = 0ul
    then begin
      // Even case: no rightmost hash at this level; recurse directly.
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    else begin
      // Odd case: fold the rightmost hash of this level into the
      // accumulator before recursing with actd = true.
      if actd
      then begin
        // Save the current accumulator into `rhs` at this level, then
        // hash (rightmost, acc) into acc.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                       (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                              (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // Not yet active: just copy the rightmost hash into the accumulator.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      // Summarize the state after the actd/not-actd branch.
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                     (if actd
                     then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                (Rgl?.r_repr (hreg hsz) hh0 acc)
                     else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      // Recurse with the accumulator now active.
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Runtime precondition for `mt_get_root` — currently unconditional (the
// real requirements are captured in the stateful spec).
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper of `mt_get_root_pre_nst` over a const tree pointer.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
  (requires (fun h0 ->
    let mt = CB.cast mt in
    MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
    mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
    HH.disjoint (B.frameOf mt) (B.frameOf rt)))
  (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  // Trivial equality to relate the runtime hash size with the implicit.
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// The ensures clause states that the tree is unchanged except `rhs_ok` becomes
// true, and that the result agrees with the high-level spec `MTH.mt_get_root`.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
// Fast path: the cached root `mroot` is up to date, so just copy it out.
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
// Copying into `rt` only touches `rt`'s region, so the tree is untouched.
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
// Slow path: (re)build the rightmost hashes, which also computes the root into `rt`.
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
// Cache the freshly computed root back into the tree's `mroot` field.
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
// Publish the tree record with `rhs_ok = true` so later calls take the fast path.
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// `mt_path_insert` appends the hash `hp` to the path `p` (a growable vector of
// hash pointers into the tree region `mtr`). The postcondition states the
// lifted path equals the spec-level `MTH.path_insert` of the old lifted path.
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
// Insert the hash pointer into the path's vector (may reallocate the vector).
let ipv = V.insert pv hp in
let hh1 = HST.get () in
// The insertion only modifies the vector's region; path elements (which live
// under `mtr`) are untouched, so safety and the lifted sequence are preserved.
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
// Store the (possibly reallocated) vector back into the path record.
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Returns 0 or 1: whether one hash is contributed at this tree level. The
// refinement ties the result to the high-level spec `MTH.mt_path_length_step`.
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
// Even `k`: no sibling hash when `k` is the last element, or when the
// only candidate (k+1 = j) is a rightmost hash that is not active.
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
// Total Merkle path length from level `lv` upward: sums `mt_path_length_step`
// over the levels, halving `k` and `j` each step. The refinement bounds the
// result by `32ul - lv` and ties it to the spec `MTH.mt_path_length`.
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
// `actd` becomes true for the next level if this level has an odd element count.
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the number of hashes currently stored in the path `p`.
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
// One step of Merkle path construction at level `lv`: pushes at most one
// sibling hash of index `k` onto the path `p`, mirroring the spec function
// `MTH.mt_make_path_step` (see the ensures clause).
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
// `ofs` translates absolute index `k` to an index inside hs[lv] (level storage
// starts at `offset_of i`).
let ofs = offset_of i in
if k % 2ul = 1ul
// Odd `k`: its sibling is the left neighbor hs[lv][k - 1 - ofs].
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
// Even `k`: sibling is the right neighbor, if it exists.
if k = j then ()
// `k + 1 = j`: the right sibling is a rightmost hash; use rhs[lv] only if active.
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end
#pop-options
// Pure precondition for `mt_get_path_step`: the index `i` must be within the
// current number of hashes stored in the path.
private inline_for_extraction
val mt_get_path_step_pre_nst:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:path ->
i:uint32_t ->
Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
i < V.size_of (Path?.hashes p)
// Stateful wrapper that dereferences the const path pointer and runs the pure
// bounds check `mt_get_path_step_pre_nst`.
val mt_get_path_step_pre:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST bool
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
mt_get_path_step_pre_nst #hsz mtr pv i)))
(ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
let p = CB.cast p in
mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the `i`-th hash pointer stored in the path `p`; `i` must be in
// bounds (see the requires clause / `mt_get_path_step_pre`).
val mt_get_path_step:
#hsz:Ghost.erased hash_size_t ->
mtr:HH.rid ->
p:const_path_p ->
i:uint32_t ->
HST.ST (hash #hsz)
(requires (fun h0 ->
path_safe h0 mtr (CB.cast p) /\
(let pv = B.get h0 (CB.cast p) 0 in
Path?.hash_size pv = Ghost.reveal hsz /\
live h0 (Path?.hashes pv) /\
i < V.size_of (Path?.hashes pv))))
(ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
let pd = !*(CB.cast p) in
V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Declaration of the recursive path-construction worker: starting at level
// `lv`, it repeatedly applies `mt_make_path_step` up the tree, growing the
// path by exactly `mt_path_length lv k j actd` hashes and matching the spec
// `MTH.mt_get_path_`. (The implementation follows this declaration.)
private
val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv))
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 2,
"max_fuel": 1,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path_:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) ==
V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
(decreases (32 - U32.v lv)) | [
"recursion"
] | MerkleTree.Low.mt_get_path_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
mtr: FStar.Monotonic.HyperHeap.rid ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
rhs:
MerkleTree.Low.Datastructures.hash_vec
{LowStar.Vector.size_of rhs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{i <= j /\ FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv)} ->
k: MerkleTree.Low.index_t{i <= k && k <= j} ->
p: MerkleTree.Low.path_p ->
actd: Prims.bool
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 51,
"end_line": 1975,
"start_col": 54,
"start_line": 1934
} |
FStar.HyperStack.ST.ST | val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_flush mt =
let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo | val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
let mt_flush mt = | true | null | false | let mtv = !*mt in
let off = MT?.offset mtv in
let j = MT?.j mtv in
let j1 = j - 1ul in
assert (j1 < uint32_32_max);
assert (off < uint64_max);
assert (UInt.fits (U64.v off + U32.v j1) 64);
let jo = join_offset off j1 in
mt_flush_to mt jo | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.mt_p",
"MerkleTree.Low.mt_flush_to",
"Prims.unit",
"MerkleTree.Low.offset_t",
"Prims.b2t",
"MerkleTree.Low.offsets_connect",
"MerkleTree.Low.join_offset",
"Prims._assert",
"FStar.UInt.fits",
"FStar.Integers.op_Plus",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt64.v",
"FStar.UInt32.v",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W64",
"MerkleTree.Low.uint64_max",
"FStar.Integers.W32",
"MerkleTree.Low.uint32_32_max",
"FStar.Integers.int_t",
"FStar.Integers.op_Subtraction",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.index_t",
"Prims.l_and",
"FStar.UInt32.lte",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.add64_fits",
"MerkleTree.Low.__proj__MT__item__offset",
"MerkleTree.Low.__proj__MT__item__j",
"MerkleTree.Low.merkle_tree",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.Buffer.trivial_preorder"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Allocates an empty Merkle tree in (sub-regions of) `r`: fresh regions for
// the level-hash vectors `hs`, the rightmost hashes `rhs`, and the root
// `mroot`.  The result lifts to `MTH.create_empty_mt` and is not full.
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // `rhs` allocation is disjoint from `hs`; re-establish its invariants.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Allocate the tree struct itself (offset 0, i = j = 0, rhs not valid yet).
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)
// Frees every component of the Merkle tree (level hashes, rightmost hashes,
// root hash) and then the tree struct itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion
// Sequence-level fact: updating index `i` of `as_seq rv` with `v` equals
// the prefix [0, i), then `v`, then the suffix [i+1, size).  Used to reason
// about `RV.assign` in terms of sub-sequence views.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                          (S.append
                            (RV.as_seq_sub h rv 0ul i)
                            (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  // The update does not disturb the slices strictly before/after index `i`.
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  // Relate slices of `as_seq` to `as_seq_sub` for the prefix ...
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  // ... and for the suffix.
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
// Copies hash `v` and pushes the copy onto the level-`lv` vector of `hs`
// (via `RV.insert_copy` then `RV.assign`).  Modifies only level `lv` (plus
// the vector slot itself); re-establishes `rv_inv` and `mt_safe_elts` for
// the untouched levels, and relates the result to `MTH.hashess_insert`.
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper: when `j` is even, inserting one element does not change
// the index at the next level (`j / 2 == (j + 1) / 2`).  Discharged by SMT.
private
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Arithmetic helper: when `j` is odd, the next-level index increases by one
// (`(j + 1) / 2 == j / 2 + 1`), stays within the next level's bound, and the
// current level is non-empty (`j - offset_of i > 0`).  Discharged by SMT.
private
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Rearranges a 4-way `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d),
// proved by chaining binary associativity/commutation steps.
private
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
// Footprint algebra for the recursive step of `insert_`: the union of the
// level-`lv` footprint, the footprint of the recursive call on levels
// `lv+1 ..`, and the accumulator location `aloc` collapses to the single
// footprint claimed in `insert_`'s `modifies` postcondition.
private
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Split the vector footprint at lv+1 ...
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  // ... and the element footprints likewise.
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening: a `modifies l1` fact also holds for the larger footprint
// `(l1 ∪ l2) ∪ l3`, via `loc_includes` monotonicity.
private
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence helper: in `snoc s v`, the element at the original last position
// of `s` is still `S.last s`.  Discharged by SMT.
private
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// The full rvector invariant implies the per-slice region invariant
// `rv_elems_reg` for any sub-range [i, j).  Discharged by SMT.
private
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// Recursive insertion worker (see the diagram above): pushes a copy of `acc`
// onto level `lv`; if `j` is odd, additionally compresses the last two hashes
// of the level into `acc` and recurses at `lv + 1`.  The result is related to
// the high-level `MTH.insert_` via `MTH.insert_rec` / `MTH.insert_base`.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                      ((Ghost.reveal hash_spec)
                        (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                        (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                        (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                      (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                        (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                 (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                   (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the 64-bit
// offset can absorb `j + 1` without overflow.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Public insertion: runs `insert_` from level 0 and bumps `j`; `rhs_ok` is
// reset to false because the cached rightmost hashes become stale.
// `v` is consumed as the accumulator, so its contents are clobbered.
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `insert_` only touched `hs` and `v`'s region; show that `rhs` and
  // `mroot` are untouched.
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Write back the struct with `j + 1` and invalidated `rhs`.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Overwriting the struct pointer does not affect the components.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree with a custom hash function, seeded with a first
// element: allocates an empty tree then inserts `init`.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: a hash size together with a vector of hashes of that size.
// Mutable and const pointer aliases are defined below.
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in path `p` as seen in memory `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the pointer and its vector are
// live/freeable, each hash element is valid and lives inside the tree region
// `mtr`, the vector's region extends the path's frame, and the path's frame
// is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
  V.forall_all h (phashes h p)
    (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
  HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
  HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything under its frame (pointer plus vector).
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lifts the slice [i, j) of a sequence of low-level hashes to a high-level
// `MTH.path`, by snoc-ing each lifted element (recursion on `j`).
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lifts the whole hash vector of `p` to an
// `MTH.path` of the same length.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Index correspondence for `lift_path_`: element `k` of the low-level slice
// lifts to element `k - i` of the lifted path.  Registered as an SMT pattern.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Path-level version of `lift_path_index_`: the i-th hash of a safe path
// lifts to the i-th element of the lifted path.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Two hash sequences that agree on the slice [i, j) lift to equal paths on
// that slice.  Proved by exposing the per-index correspondence on both sides.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
// Element-wise preservation: if the modified location `dl` is disjoint from
// the tree region `mtr`, then every hash in the slice [i, j) stays valid and
// region-contained.  Recursion on `j`, peeling the last element.
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` is preserved by any modification disjoint from both the path
// footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: only disjointness from the path footprint
// is needed (there are no hash elements inside `mtr` to preserve).
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// The lifted value of a hash slice is unchanged by a modification disjoint
// from the tree region `mtr` (each element's representation is separated).
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Path-level version of `path_preserved_`: under the same disjointness
// assumptions, the lifted path (and its hash size) is unchanged.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocates an empty path in region `r` (with a fresh sub-region for the
// hash vector); the result lifts to the empty high-level path.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
   (requires (fun h0 -> HH.disjoint mtr r))
   (ensures (fun h0 p h1 ->
     // memory safety
     path_safe h1 mtr p /\
     // correctness
     Path?.hash_size (B.get h1 p 0) = hsz /\
     S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to empty by clearing its hash vector (size becomes 0).
// Note: `V.clear` resets the size without freeing the hashes themselves.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  // Keep the same hash size, swap in the cleared vector.
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocate a path: frees the hash vector then the path struct itself.
// NOTE(review): this frees only the vector, not the individual hashes it
// points to — consistent with paths holding pointers into the tree.
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Build the "rightmost hashes" vector `rhs` for the (possibly incomplete)
// tree levels starting at `lv`, accumulating the Merkle root into `acc`.
// `actd` records whether `acc` already holds an active accumulated hash.
// The postcondition ties the result to the high-level specification
// `MTH.construct_rhs`.
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
     HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
     mt_safe_elts #hsz h0 lv hs i j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.loc_rvector rhs)
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 rhs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs i j;
     MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
       (U32.v lv)
       (Rgl?.r_repr (hvvreg hsz) h0 hs)
       (Rgl?.r_repr (hvreg hsz) h0 rhs)
       (U32.v i) (U32.v j)
       (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
     (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
     )))
   (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Recursion over levels: if the level width `j` is even, recurse directly
// to the parent level; if odd, the rightmost element is folded into `acc`
// (and saved in `rhs.[lv]` when `actd`) before recursing.
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  if j = 0ul then begin
    // Base case: nothing to construct; the spec call is an identity here.
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    (if j % 2ul = 0ul
    then begin
      // Even case: recurse on the parent level with halved indices.
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    else begin
      // Odd case: fold the rightmost hash of this level into `acc`.
      if actd
      then begin
        // `acc` is active: save it as this level's rightmost hash, then
        // hash it with the level's last element.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                        (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                               (Rgl?.r_repr (hreg hsz) hh0 acc)));
        // acc := hash(last element of level lv, acc)
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // `acc` inactive: initialize it by copying the level's last element.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      // Summarize both branches before the recursive call.
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                      (if actd
                      then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                 (Rgl?.r_repr (hreg hsz) hh0 acc)
                      else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      // Recurse on the parent level; `acc` is now active.
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Runtime precondition check for `mt_get_root`; currently trivially true
// (the real preconditions are enforced statically in the ST spec).
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Public wrapper around `mt_get_root_pre_nst`: dereferences the const
// tree pointer and runs the (currently trivial) runtime check.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
      mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
      HH.disjoint (B.frameOf mt) (B.frameOf rt)))
    (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Compute the Merkle root into `rt`. If the cached rightmost hashes are
// valid (`rhs_ok`), the stored root is simply copied out; otherwise
// `construct_rhs` rebuilds them and the root, and the tree is updated
// with `rhs_ok = true` and the new `mroot`.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: cached root is valid — just copy it into `rt`.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild rightmost hashes and root from level 0.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root in the tree.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark the rightmost hashes as valid.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
inline_for_extraction
// Append a hash pointer `hp` (which must live inside the tree region
// `mtr`) to path `p`. At the spec level this is `MTH.path_insert`.
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
      (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
       hsz = hsz0 /\ hsz = hsz1 /\
       (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
       S.equal hspec after))))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // Insert the pointer into the path's vector (may reallocate it).
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  // Vector insertion only touches the path's own regions, so the lifted
  // hashes (all under `mtr`) are preserved.
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Commit the (possibly reallocated) vector back into the path struct.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
// One level's contribution (0 or 1) to the Merkle path length for target
// index `k` at a level holding `j` elements; refined to agree with the
// high-level spec `MTH.mt_path_length_step`.
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       // A left child contributes no sibling when it is the last element,
       // or the next-to-last with no active accumulator.
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
private inline_for_extraction
// Total Merkle path length for target `k` from level `lv` upward, summing
// `mt_path_length_step` across levels; refined to match the spec and
// bounded by the number of remaining levels (32 - lv).
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       // An odd level activates the accumulator for all parent levels.
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Return the number of hashes currently stored in path `p`.
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
private inline_for_extraction
// Add (at most) one sibling hash for level `lv` to path `p`: the left
// sibling if `k` is odd, otherwise the right sibling from `hs` or — at
// the rightmost boundary with an active accumulator — from `rhs`.
// Matches `MTH.mt_make_path_step` at the spec level.
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
      V.size_of (phashes h1 p) <= lv + 2ul /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_make_path_step
           (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // `k` is a right child: insert its left sibling from `hs`.
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // `k` is a left child: insert the right sibling, if one exists.
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         // Right sibling is the accumulated rightmost hash for this level.
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
private inline_for_extraction
// Runtime precondition for `mt_get_path_step`: index `i` must be within
// the path's current size.
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Public wrapper: dereference the const path pointer and run the bounds
// check `mt_get_path_step_pre_nst`.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Return the `i`-th hash (pointer) stored in path `p`; `i` must be in
// bounds per the precondition.
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
// Recursively build the Merkle path for target `k` from level `lv` up to
// the root, appending one sibling per level via `mt_make_path_step`.
// Matches `MTH.mt_get_path_` at the spec level.
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) ==
      V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
    (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (// Add this level's sibling, then recurse on the parent level.
    mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                    (MTH.mt_make_path_step
                      (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd));
    // The step only modified the path, so the tree state is preserved.
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv + 1)
                      (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                      (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                      (lift_path hh1 mtr p)
                      (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv)
                      (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
// Runtime precondition for `mt_get_path`: the offset must be addressable
// from the tree's base offset, hash sizes must match, the (split) index
// must be within the tree's live window [i, j), and the output path must
// start empty.
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
// Public wrapper: dereference the const tree and path pointers and run
// `mt_get_path_pre_nst`.
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
// Trivial location-algebra fact used in `mt_get_path`'s modifies proof:
// unioning `l2` in again is idempotent.
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
// Top-level path generation: refresh the root (via `mt_get_root`), insert
// the target leaf's own hash, then collect one sibling per level with
// `mt_get_path_`. Returns `j` (the number of leaves), which the verifier
// needs. The path stores pointers into the tree, not copies.
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      let idx = split_offset (MT?.offset mtv0) idx in
      MT?.hash_size mtv0 = Ghost.reveal hsz /\
      MT?.hash_size mtv1 = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
      // memory safety
      modifies (loc_union
                 (loc_union
                   (mt_loc mt)
                   (B.loc_all_regions_from false (B.frameOf root)))
                 (path_loc p))
               h0 h1 /\
      mt_safe h1 mt /\
      path_safe h1 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h1 root /\
      V.size_of (phashes h1 p) ==
      1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
      // correctness
      (let sj, sp, srt =
        MTH.mt_get_path
          (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
      sj == U32.v (MT?.j mtv1) /\
      S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
      srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // Ensure the rightmost hashes and root are up to date before reading.
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  // The root computation did not touch the (still empty) path.
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  // The path starts with the target leaf's own hash.
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                  (MTH.path_insert
                    (lift_path hh1 mtframe p)
                    (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Collect one sibling per level, from level 0 upward.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                     (loc_union
                       (mt_loc ncmt)
                       (B.loc_all_regions_from false (B.frameOf root)))
                     (path_loc p))
                   hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                  (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                    (U32.v i) (U32.v j) (U32.v idx)
                    (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing
private val
// Location-algebra helper for `mt_flush_to_`'s modifies clause: the
// footprint of level `lv` plus the combined footprint of levels
// [lv+1, size) equals the combined footprint of levels [lv, size),
// for both the element locations and the vector cells.
mt_flush_to_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (loc_union
            (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
        loc_union
          (RV.rv_loc_elems h hs lv (V.size_of hs))
          (V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
  // Unfold both unions one step, then reassociate the four terms.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
offsets_connect (MT?.offset mtv) idx &&
([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
idx >= MT?.i mtv &&
idx < MT?.j mtv)
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
val mt_flush_to:
mt:mt_p ->
idx:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let idx = split_offset off idx in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let j = MT?.j mtv in
let hsz = MT?.hash_size mtv in
let idx = split_offset offset idx in
let hs = MT?.hs mtv in
mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv) idx (MT?.j mtv)
hs
(MT?.rhs_ok mtv) (MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_flush:
mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
(ensures (fun h0 _ h1 ->
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt)) | [] | MerkleTree.Low.mt_flush | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} | mt: MerkleTree.Low.mt_p -> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 19,
"end_line": 2525,
"start_col": 17,
"start_line": 2516
} |
FStar.HyperStack.ST.ST | val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec mt_flush_to_ hsz lv hs pi i j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else begin
/// 1) Flush hashes at the level `lv`, where the new vector is
/// not yet connected to `hs`.
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
/// 2) Assign the flushed vector to `hs` at the level `lv`.
RV.assign hs lv flushed;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) ==
Ghost.reveal j - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
// if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
// contradicting the branch.
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
/// 3) Recursion
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
(Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
// 3-0) Memory safety brought from the postcondition of the recursion
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
(U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
(U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
end | val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
let rec mt_flush_to_ hsz lv hs pi i j = | true | null | false | let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
let oi = offset_of i in
let opi = offset_of pi in
if oi = opi
then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
else
let ofs = oi - opi in
let hvec = V.index hs lv in
let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
let hh1 = HST.get () in
V.forall2_forall_left hh0
hs
0ul
(V.size_of hs)
lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1) (Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0
hs
0ul
(V.size_of hs)
lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1) (Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved hs
0ul
lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec) (Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0
hh1;
V.forall_preserved hs
(lv + 1ul)
(V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec) (Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0
hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
RV.rv_loc_elems_preserved hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (V.size_of flushed == Ghost.reveal j - offset_of i);
mt_safe_elts_preserved (lv + 1ul)
hs
(pi / 2ul)
(Ghost.reveal j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv))
hh0
hh1;
RV.rs_loc_elems_elem_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
0
(U32.v (V.size_of hs))
0
(U32.v lv)
(U32.v lv);
RV.rs_loc_elems_parent_disj (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs) 0 (U32.v lv);
RV.rv_elems_inv_preserved hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
0
(U32.v (V.size_of hs))
(U32.v lv + 1)
(U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj (hvreg hsz)
(V.as_seq hh0 hs)
(V.frameOf hs)
(U32.v lv + 1)
(U32.v (V.size_of hs));
RV.rv_elems_inv_preserved hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
assert (S.equal (RV.as_seq hh1 flushed)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv))
(U32.v ofs)
(S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
RV.assign hs lv flushed;
let hh2 = HST.get () in
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0
hh2);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved hs
(lv + 1ul)
(V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul))
hh1
hh2;
assert (V.size_of (V.get hh2 hs lv) == Ghost.reveal j - offset_of i);
mt_safe_elts_preserved (lv + 1ul)
hs
(pi / 2ul)
(Ghost.reveal j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul))
hh1
hh2;
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append (RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 flushed) (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
assert (lv + 1ul < merkle_tree_size_lg);
assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
assert (RV.rv_inv hh2 hs);
assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul) (Ghost.hide (Ghost.reveal j / 2ul));
let hh3 = HST.get () in
assert (modifies (loc_union (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0
hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint (V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs
lv
(loc_union (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2
hh3;
assert (V.size_of (V.get hh3 hs lv) == Ghost.reveal j - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv + 1)
(RV.as_seq hh2 hs)
(U32.v pi / 2)
(U32.v i / 2)
(U32.v (Ghost.reveal j) / 2)));
mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
MTH.mt_flush_to_rec (U32.v lv) (RV.as_seq hh0 hs) (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_flush_to_ (U32.v lv)
(RV.as_seq hh0 hs)
(U32.v pi)
(U32.v i)
(U32.v (Ghost.reveal j)))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.index_t",
"FStar.Integers.op_Greater_Equals",
"FStar.Ghost.erased",
"Prims.op_AmpAmp",
"FStar.Ghost.reveal",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt32.v",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.mt_safe_elts_spec",
"Prims.unit",
"Prims.bool",
"Prims._assert",
"FStar.Seq.Base.equal",
"LowStar.Regional.__proj__Rgl__item__repr",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.RVector.as_seq",
"MerkleTree.New.High.mt_flush_to_",
"MerkleTree.New.High.mt_flush_to_rec",
"FStar.Integers.op_Plus",
"FStar.Integers.op_Slash",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.mt_safe_elts",
"MerkleTree.Low.mt_safe_elts_constr",
"LowStar.RVector.rv_inv",
"Prims.eq2",
"FStar.UInt32.t",
"MerkleTree.Low.Datastructures.hash",
"LowStar.Vector.get",
"MerkleTree.Low.offset_of",
"LowStar.Vector.get_preserved",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rv_loc_elems",
"LowStar.Vector.loc_vector_within",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.RVector.rv_loc_elems_included",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.loc_vector_within_disjoint",
"MerkleTree.Low.mt_flush_to_modifies_rec_helper",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.RVector.rs_loc_elem",
"LowStar.Vector.as_seq",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.mt_flush_to_",
"FStar.Ghost.hide",
"MerkleTree.Low.as_seq_sub_upd",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Seq.Base.append",
"LowStar.RVector.as_seq_sub",
"FStar.Seq.Properties.cons",
"LowStar.RVector.as_seq_sub_preserved",
"LowStar.RVector.loc_rvector",
"MerkleTree.Low.mt_safe_elts_preserved",
"LowStar.RVector.rv_loc_elems_preserved",
"LowStar.RVector.assign",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"LowStar.RVector.elems_reg",
"LowStar.RVector.rv_itself_inv",
"LowStar.RVector.rv_elems_inv",
"LowStar.RVector.rv_elems_inv_preserved",
"LowStar.RVector.rs_loc_elems_parent_disj",
"LowStar.Vector.frameOf",
"LowStar.RVector.rs_loc_elems_elem_disj",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Regional.__proj__Rgl__item__region_of",
"LowStar.Vector.forall_preserved",
"FStar.Monotonic.HyperHeap.disjoint",
"LowStar.Vector.forall2_forall_right",
"LowStar.Vector.forall2_forall_left",
"LowStar.RVector.rvector",
"MerkleTree.Low.VectorExtras.rv_flush_inplace",
"LowStar.Vector.index",
"FStar.Integers.int_t",
"MerkleTree.Low.mt_safe_elts_rec"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
// log2 of the maximum tree capacity: the tree has 32 levels, supporting
// up to 2^32 - 1 leaves.
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
      offset:offset_t ->
      // `i`/`j` are in-tree indices; `offset + j` must fit in 64 bits.
      i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
      // One hash vector per level; exactly `merkle_tree_size_lg` levels.
      hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
      rhs_ok:bool ->
      rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
      mroot:hash #hash_size ->
      // Ghost spec of the 2-to-1 hash, and its concrete implementation.
      hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
      hash_fun:hash_fun_t #hash_size #hash_spec ->
      merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
// Boolean (runtime-checkable) form of the basic well-formedness conditions
// of a tree's fields; mirrors the refinements on the `MT` constructor.
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
  j >= i && add64_fits offset j &&
  V.size_of hs = merkle_tree_size_lg &&
  V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
// Heap-level version: reads the tree value out of memory `h` first.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety

// Rounds an index down to the nearest even number: the first index at a
// level whose hash is still physically stored (odd-indexed left siblings
// may have been consumed by compression at the level above).
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  GTot Type0 (decreases (32 - U32.v lv))
// Recurses level by level, halving the index range each time; at each level
// the stored vector length must be exactly `j - offset_of i`.
let rec mt_safe_elts #hsz h lv hs i j =
  if lv = merkle_tree_size_lg then true
  else (let ofs = offset_of i in
       V.size_of (V.get h hs lv) == j - ofs /\
       mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction rule for `mt_safe_elts`: fold one step of the recursion.
val mt_safe_elts_constr:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
                  mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
        (ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination rule: extract the level-`lv` size fact.
val mt_safe_elts_head:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination rule: step to the next level with halved indices.
val mt_safe_elts_rec:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  Lemma (requires (mt_safe_elts #hsz h lv hs i j))
        (ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// An all-empty store (every level vector has size 0) is safe for range [0, 0).
val mt_safe_elts_init:
  #hsz:hash_size_t ->
  h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  Lemma (requires (V.forall_ h hs lv (V.size_of hs)
                  (fun hv -> V.size_of hv = 0ul)))
        (ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
        (decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Frame lemma: `mt_safe_elts` survives any modification disjoint from the
// hash-store vector; registered as an SMT pattern so it fires automatically.
val mt_safe_elts_preserved:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{j >= i} ->
  p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.live h0 hs /\
                  mt_safe_elts #hsz h0 lv hs i j /\
                  loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
                  modifies p h0 h1))
        (ensures (mt_safe_elts #hsz h1 lv hs i j))
        (decreases (32 - U32.v lv))
        [SMTPat (V.live h0 hs);
        SMTPat (mt_safe_elts #hsz h0 lv hs i j);
        SMTPat (loc_disjoint p (RV.loc_rvector hs));
        SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
  if lv = merkle_tree_size_lg then ()
  else (V.get_preserved hs lv p h0 h1;
       mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
  B.live h mt /\ B.freeable mt /\
  (let mtv = B.get h mt 0 in
  // Liveness & Accessibility
  RV.rv_inv h (MT?.hs mtv) /\
  RV.rv_inv h (MT?.rhs mtv) /\
  Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
  mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
  // Regionality: each component lives in its own sub-region of the tree's
  // region, and the three components' regions are pairwise disjoint.
  HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
  HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
  HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
  HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
  HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Frame lemma for the whole-tree invariant: any modification disjoint from
// `mt_loc mt` preserves both the stored tree value and `mt_safe`.
val mt_safe_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
                 mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
  // `mt_loc` covers every component, so `p` is disjoint from each of them;
  // the inclusion assertions below make that explicit for the SMT solver.
  assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
  let mtv = B.get h0 mt 0 in
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
  assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
  assert (loc_includes (mt_loc mt)
           (B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
  // Propagate preservation to each component invariant.
  RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
  RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
  V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
  mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure

// The low-level element-safety predicate implies the high-level (spec)
// well-formedness of the hash store, level by level.
val mt_safe_elts_spec:
  #hsz:hash_size_t ->
  h:HS.mem ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j >= i} ->
  Lemma (requires (RV.rv_inv h hs /\
                  mt_safe_elts #hsz h lv hs i j))
        (ensures (MTH.hs_wf_elts #(U32.v hsz)
                   (U32.v lv) (RV.as_seq h hs)
                   (U32.v i) (U32.v j)))
        (decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
  if lv = merkle_tree_size_lg then ()
  else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lift of a low-level tree value to the high-level specification tree
// (MerkleTree.New.High); requires the component invariants to hold in `h`.
val merkle_tree_lift:
  h:HS.mem ->
  mtv:merkle_tree{
    RV.rv_inv h (MT?.hs mtv) /\
    RV.rv_inv h (MT?.rhs mtv) /\
    Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
    mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
  // Needed to establish the `mt_wf_elts` refinement on the result.
  mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
  MTH.MT #(U32.v (MT?.hash_size mtv))
    (U32.v (MT?.i mtv))
    (U32.v (MT?.j mtv))
    (RV.as_seq h (MT?.hs mtv))
    (MT?.rhs_ok mtv)
    (RV.as_seq h (MT?.rhs mtv))
    (Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
    (Ghost.reveal (MT?.hash_spec mtv))
// Pointer-level version of `merkle_tree_lift`: dereference, then lift.
val mt_lift:
  h:HS.mem -> mt:mt_p{mt_safe h mt} ->
  GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
  merkle_tree_lift h (B.get h mt 0)
// Frame lemma for the lifted view: modifications disjoint from the tree
// leave its high-level representation unchanged.
val mt_preserved:
  mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (mt_safe h0 mt /\
                  loc_disjoint p (mt_loc mt) /\
                  modifies p h0 h1))
        (ensures (mt_safe_preserved mt p h0 h1;
                 mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer mt));
  B.modifies_buffer_elim mt p h0 h1;
  assert (B.get h0 mt 0 == B.get h1 mt 0);
  // Each component's representation is preserved, hence so is the lift.
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
  assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
                       (B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
  RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
  RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
  B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction

// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
  hash_size:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
  hash_fun:hash_fun_t #hash_size #hash_spec ->
  r:HST.erid ->
  HST.ST mt_p
   (requires (fun _ -> true))
   (ensures (fun h0 mt h1 ->
     let dmt = B.get h1 mt 0 in
     // memory safety
     B.frameOf mt = r /\
     modifies (mt_loc mt) h0 h1 /\
     mt_safe h1 mt /\
     mt_not_full h1 mt /\
     // correctness
     MT?.hash_size dmt = hash_size /\
     MT?.offset dmt = 0UL /\
     merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
  [@inline_let] let hrg = hreg hsz in
  [@inline_let] let hvrg = hvreg hsz in
  [@inline_let] let hvvrg = hvvreg hsz in
  // Allocate the three components each in a fresh sub-region of `r`, so
  // the pairwise region disjointness required by `mt_safe` holds.
  let hs_region = HST.new_region r in
  let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
  let h0 = HST.get () in
  mt_safe_elts_init #hsz h0 0ul hs;
  let rhs_region = HST.new_region r in
  let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
  let h1 = HST.get () in
  assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
  // The `rhs` allocation does not disturb `hs`.
  RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
  RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
  let mroot_region = HST.new_region r in
  let mroot = rg_alloc hrg mroot_region in
  let h2 = HST.get () in
  RV.as_seq_preserved hs loc_none h1 h2;
  RV.as_seq_preserved rhs loc_none h1 h2;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
  // Finally allocate the tree record itself directly in `r`.
  let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
  let h3 = HST.get () in
  RV.as_seq_preserved hs loc_none h2 h3;
  RV.as_seq_preserved rhs loc_none h2 h3;
  Rgl?.r_sep hrg mroot loc_none h2 h3;
  mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
  mt
#pop-options
/// Destruction (free)

// Frees every component of the tree (hash store, rightmost hashes, root)
// and then the tree record itself.
val mt_free: mt:mt_p ->
  HST.ST unit
   (requires (fun h0 -> mt_safe h0 mt))
   (ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
  let mtv = !*mt in
  RV.free (MT?.hs mtv);
  RV.free (MT?.rhs mtv);
  [@inline_let] let rg = hreg (MT?.hash_size mtv) in
  rg_free rg (MT?.mroot mtv);
  B.free mt
#pop-options
/// Insertion

// Sequence algebra helper: updating index `i` of the lifted rvector equals
// (prefix before i) ++ [v] ++ (suffix after i). Used to relate `RV.assign`
// to the high-level sequence update.
private
val as_seq_sub_upd:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector #a #rst rg ->
  i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
                         (S.append
                           (RV.as_seq_sub h rv 0ul i)
                           (S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
  Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
  Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) 0 (U32.v i);
  assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
                  (RV.as_seq_sub h rv 0ul i));
  RV.as_seq_seq_slice rg h (V.as_seq h rv)
    0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
  assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
                  (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
  assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (V.frameOf hs) (B.frameOf v) /\
     mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety: only the level-`lv` vector and its slot in `hs` change
     modifies (loc_union
                (RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
                (V.loc_vector_within hs lv (lv + 1ul)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 v /\
     V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
     V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
     mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
     RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
     RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
     // correctness: `hs[lv]` gets `v` snoc'ed, matching the spec's insert
     (mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.hashess_insert
               (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
     S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
             (S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
                     (Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
  let hh0 = HST.get () in
  mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;

  /// 1) Insert an element at the level `lv`, where the new vector is not yet
  /// connected to `hs`.
  let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
  let hh1 = HST.get () in

  // 1-0) Basic disjointness conditions
  V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
    (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                              (Rgl?.region_of (hvreg hsz) b2));
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  // 1-1) For the `modifies` postcondition.
  assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);

  // 1-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;

  // 1-3) For `mt_safe_elts`
  assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet

  // 1-4) For the `rv_inv` postcondition
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v lv);
  RV.rv_elems_inv_preserved
    hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs 0ul lv);
  RV.rs_loc_elems_elem_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    0 (U32.v (V.size_of hs))
    (U32.v lv + 1) (U32.v (V.size_of hs))
    (U32.v lv);
  RV.rs_loc_elems_parent_disj
    (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
    (U32.v lv + 1) (U32.v (V.size_of hs));
  RV.rv_elems_inv_preserved
    hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
    hh0 hh1;
  assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
  // assert (rv_itself_inv hh1 hs);
  // assert (elems_reg hh1 hs);

  // 1-5) Correctness
  assert (S.equal (RV.as_seq hh1 ihv)
                  (S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));

  /// 2) Assign the updated vector to `hs` at the level `lv`.
  RV.assign hs lv ihv;
  let hh2 = HST.get () in

  // 2-1) For the `modifies` postcondition.
  assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
  assert (modifies (loc_union
                     (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                     (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);

  // 2-2) Preservation
  Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
  RV.rv_loc_elems_preserved
    hs (lv + 1ul) (V.size_of hs)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-3) For `mt_safe_elts`
  assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  mt_safe_elts_preserved
    (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
    (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;

  // 2-4) Correctness
  RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
  RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
  assert (S.equal (RV.as_seq hh2 hs)
                  (S.append
                    (RV.as_seq_sub hh0 hs 0ul lv)
                    (S.cons (RV.as_seq hh1 ihv)
                            (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
  as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
// Arithmetic fact used on the even-`j` branch of `insert_`:
// appending at an even index does not change the parent-level range.
val insert_index_helper_even:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul <> 1ul))
        (ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
// Arithmetic facts used on the odd-`j` branch of `insert_`: the parent
// range grows by one, and the recursion's bound is maintained.
val insert_index_helper_odd:
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
  Lemma (requires (j % 2ul = 1ul /\
                  j < uint32_32_max))
        (ensures (U32.v j % 2 = 1 /\
                 U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
                 (j + 1ul) / 2ul == j / 2ul + 1ul /\
                 j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
// Rearranges a union of four locations: (a∪b)∪(c∪d) == (a∪c)∪(b∪d).
val loc_union_assoc_4:
  a:loc -> b:loc -> c:loc -> d:loc ->
  Lemma (loc_union (loc_union a b) (loc_union c d) ==
        loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
  loc_union_assoc (loc_union a b) c d;
  loc_union_assoc a b c;
  loc_union_assoc a c b;
  loc_union_assoc (loc_union a c) b d
private
// Location-algebra lemma for `insert_`'s recursive case: the union of the
// level-`lv` footprint and the recursive (levels above `lv`) footprint
// collapses to the footprint of all levels from `lv`, each joined with the
// accumulator's location `aloc`.
val insert_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  aloc:loc ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (loc_union
              (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
              (V.loc_vector_within hs lv (lv + 1ul)))
            aloc)
          (loc_union
            (loc_union
              (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
              (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
            aloc) ==
        loc_union
          (loc_union
            (RV.rv_loc_elems h hs lv (V.size_of hs))
            (V.loc_vector_within hs lv (V.size_of hs)))
          aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
  // Unfold both footprints one level so the unions can be re-associated.
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));

  // Applying some association rules...
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul))) aloc
    (loc_union
      (loc_union
        (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      aloc);
  loc_union_assoc
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
  loc_union_assoc
    (loc_union
      (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
      (V.loc_vector_within hs lv (lv + 1ul)))
    (loc_union
      (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
    aloc;
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening: `modifies l1` implies `modifies ((l1 ∪ l2) ∪ l3)`.
// Used on `insert_`'s base case to match the recursive case's footprint.
val insert_modifies_union_loc_weakening:
  l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (modifies l1 h0 h1))
        (ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
  B.loc_includes_union_l l1 l2 l1;
  B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// The element at the pre-snoc last position of `snoc s v` is `last s`.
val insert_snoc_last_helper:
  #a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
  Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// `rv_inv` implies the element-region property on any sub-range.
val rv_inv_rv_elems_reg:
  #a:Type0 -> #rst:Type -> #rg:regional rst a ->
  h:HS.mem -> rv:rvector rg ->
  i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
  Lemma (requires (RV.rv_inv h rv))
        (ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// NOTE: `acc` is both the value being inserted and the running accumulator;
// on odd levels it is overwritten with hash(left-sibling, acc) before
// recursing, so the caller's buffer is clobbered.
private
val insert_:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  i:Ghost.erased index_t ->
  j:index_t{
    Ghost.reveal i <= j &&
    U32.v j < pow2 (32 - U32.v lv) - 1 &&
    j < uint32_32_max} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  acc:hash #hsz ->
  hash_fun:hash_fun_t #hsz #hash_spec ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
     mt_safe_elts h0 lv hs (Ghost.reveal i) j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (loc_union
                  (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                  (V.loc_vector_within hs lv (V.size_of hs)))
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 hs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
     // correctness
     (mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
     S.equal (RV.as_seq h1 hs)
             (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
               (RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
   (decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
  let hh0 = HST.get () in
  // Push `acc` onto hs[lv] (copying), then decide whether to keep compressing.
  hash_vv_insert_copy lv i j hs acc;
  let hh1 = HST.get () in

  // Base conditions
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);

  assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
  assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));

  if j % 2ul = 1ul
  then (// Odd `j`: the new element completes a pair, so compress it with its
       // left sibling and carry the result up to the next level.
       insert_index_helper_odd lv (Ghost.reveal i) j;
       assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
       let lvhs = V.index hs lv in
       assert (U32.v (V.size_of lvhs) ==
              S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
       assert (V.size_of lvhs > 1ul);

       /// 3) Update the accumulator `acc`.
       hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
       assert (Rgl?.r_inv (hreg hsz) hh1 acc);
       hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
       let hh2 = HST.get () in

       // 3-1) For the `modifies` postcondition
       assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
       assert (modifies
                (loc_union
                  (loc_union
                    (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                    (V.loc_vector_within hs lv (lv + 1ul)))
                  (B.loc_all_regions_from false (B.frameOf acc)))
                hh0 hh2);

       // 3-2) Preservation
       RV.rv_inv_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.as_seq_preserved
         hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       RV.rv_loc_elems_preserved
         hs (lv + 1ul) (V.size_of hs)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
       assert (RV.rv_inv hh2 hs);
       assert (Rgl?.r_inv (hreg hsz) hh2 acc);

       // 3-3) For `mt_safe_elts`
       V.get_preserved hs lv
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
       mt_safe_elts_preserved
         (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
         (B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved

       // 3-4) Correctness
       insert_snoc_last_helper
         (RV.as_seq hh0 (V.get hh0 hs lv))
         (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
                       ((Ghost.reveal hash_spec)
                         (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
                         (Rgl?.r_repr (hreg hsz) hh0 acc)));

       /// 4) Recursion
       insert_ (lv + 1ul)
         (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
         hs acc hash_fun;
       let hh3 = HST.get () in

       // 4-0) Memory safety brought from the postcondition of the recursion
       assert (RV.rv_inv hh3 hs);
       assert (Rgl?.r_inv (hreg hsz) hh3 acc);
       assert (modifies (loc_union
                          (loc_union
                            (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                          (B.loc_all_regions_from false (B.frameOf acc)))
                        hh2 hh3);
       assert (modifies
                (loc_union
                  (loc_union
                    (loc_union
                      (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                      (V.loc_vector_within hs lv (lv + 1ul)))
                    (B.loc_all_regions_from false (B.frameOf acc)))
                  (loc_union
                    (loc_union
                      (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                      (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
                    (B.loc_all_regions_from false (B.frameOf acc))))
                hh0 hh3);

       // 4-1) For `mt_safe_elts`
       rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
       RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
       assert (loc_disjoint
                (V.loc_vector_within hs lv (lv + 1ul))
                (B.loc_all_regions_from false (B.frameOf acc)));
       V.get_preserved hs lv
         (loc_union
           (loc_union
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh2 hh3;
       assert (V.size_of (V.get hh3 hs lv) ==
              j + 1ul - offset_of (Ghost.reveal i)); // head preserved
       assert (mt_safe_elts hh3 (lv + 1ul) hs
                (Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
       mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));

       // 4-2) Correctness
       mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
                         (RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh3 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
  else (// Even `j`: no sibling to compress with; insertion stops here.
       insert_index_helper_even lv j;
       // memory safety
       assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
       mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
       assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
       assert (modifies
                (loc_union
                  (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                  (V.loc_vector_within hs lv (lv + 1ul)))
                hh0 hh1);
       insert_modifies_union_loc_weakening
         (loc_union
           (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
           (V.loc_vector_within hs lv (lv + 1ul)))
         (B.loc_all_regions_from false (B.frameOf acc))
         (loc_union
           (loc_union
             (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
             (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
           (B.loc_all_regions_from false (B.frameOf acc)))
         hh0 hh1;
       // correctness
       mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
       MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
       assert (S.equal (RV.as_seq hh1 hs)
                       (MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                         (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));

  /// 5) Proving the postcondition after recursion
  let hh4 = HST.get () in

  // 5-1) For the `modifies` postcondition.
  assert (modifies
           (loc_union
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (B.loc_all_regions_from false (B.frameOf acc)))
             (loc_union
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
               (B.loc_all_regions_from false (B.frameOf acc))))
           hh0 hh4);
  insert_modifies_rec_helper
    lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;

  // 5-2) For `mt_safe_elts`
  assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));

  // 5-3) Preservation
  assert (RV.rv_inv hh4 hs);
  assert (Rgl?.r_inv (hreg hsz) hh4 acc);

  // 5-4) Correctness
  mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
  assert (S.equal (RV.as_seq hh4 hs)
                  (MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
                    (RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Runtime precondition of insertion: the tree is not full and the new
// element count still fits when added to the 64-bit offset.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Public (stateful) wrapper over `mt_insert_pre_nst`, taking a const pointer.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
  (ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
  let mt = !*(CB.cast mt) in
  assert (MT?.hash_size mt == (MT?.hash_size mt));
  mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
  hsz:Ghost.erased hash_size_t ->
  mt:mt_p -> v:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let dmt = B.get h0 mt 0 in
     mt_safe h0 mt /\
     Rgl?.r_inv (hreg hsz) h0 v /\
     HH.disjoint (B.frameOf mt) (B.frameOf v) /\
     MT?.hash_size dmt = Ghost.reveal hsz /\
     mt_insert_pre_nst dmt v))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf v)))
              h0 h1 /\
     mt_safe h1 mt /\
     // correctness
     MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
     mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let hs = MT?.hs mtv in
  let hsz = MT?.hash_size mtv in
  // Do the actual multi-level insertion starting at level 0.
  insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
  let hh1 = HST.get () in
  // `rhs` and `mroot` are untouched by `insert_` (disjoint footprints).
  RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
    (loc_union
      (loc_union
        (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
        (V.loc_vector_within hs 0ul (V.size_of hs)))
      (B.loc_all_regions_from false (B.frameOf v)))
    hh0 hh1;
  // Write the updated record back: `j` incremented, `rhs_ok` invalidated.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv)
           (MT?.i mtv)
           (MT?.j mtv + 1ul)
           (MT?.hs mtv)
           false // `rhs` is always deprecated right after an insertion.
           (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv)
           (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  // Writing the record only touches the tree pointer's buffer.
  RV.rv_inv_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved
    (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved
    0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
    hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Create a Merkle tree in region `r` with a user-supplied hash function
// (`hash_fun` realizing the ghost `hash_spec`) and a first leaf `init`.
// A valid tree must contain at least one element, hence the immediate insert.
val mt_create_custom:
  hsz:hash_size_t ->
  hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
   (requires (fun h0 ->
     Rgl?.r_inv (hreg hsz) h0 init /\
     HH.disjoint r (B.frameOf init)))
   (ensures (fun h0 mt h1 ->
     // memory safety
     modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
     mt_safe h1 mt /\
     // correctness: lifting gives exactly the high-level `mt_create`
     MT?.hash_size (B.get h1 mt 0) = hsz /\
     mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
  let hh0 = HST.get () in
  // Allocate the empty structure, then insert the mandatory first leaf.
  let mt = create_empty_mt hsz hash_spec hash_fun r in
  mt_insert hsz mt init;
  let hh2 = HST.get () in
  mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A Merkle path: the hash size it was built for, plus a vector of hash
// pointers. The pointers alias hashes inside the tree's `MT?.hs`, which is
// why paths get hand-rolled invariants (`path_safe`) instead of the generic
// regionality framework (see the comment above).
noeq type path =
| Path: hash_size:hash_size_t ->
        hashes:V.vector (hash #hash_size) ->
        path
// Mutable pointer to a path, and its read-only (const) counterpart.
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in path `p` at memory state `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
// Memory-safety invariant for a path `p` whose hashes point into the Merkle
// tree rooted at region `mtr`: the path pointer and its vector are live and
// freeable, every stored hash is valid and lives under `mtr`, the vector's
// region extends the path's frame, and `mtr` is disjoint from the path frame.
inline_for_extraction noextract
val path_safe:
  h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
  B.live h p /\ B.freeable p /\
  V.live h (phashes h p) /\ V.freeable (phashes h p) /\
  HST.is_eternal_region (V.frameOf (phashes h p)) /\
  (let hsz = Path?.hash_size (B.get h p 0) in
   V.forall_all h (phashes h p)
     (fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
                HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
   HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
   HH.disjoint mtr (B.frameOf p))
// Footprint of a path: everything allocated under the path's frame.
// Note this deliberately excludes the tree hashes the path points into.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Lift the slice [i, j) of a sequence of low-level hash pointers to a
// high-level spec path (sequence of pure hashes), reading each hash's
// representation in memory `h`. Recurses from the right end (snoc),
// so element order is preserved.
val lift_path_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat ->
  j:nat{
    i <= j /\ j <= S.length hs /\
    V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
  if i = j then S.empty
  else (S.snoc (lift_path_ h hs i (j - 1))
               (Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the whole hash vector of a (safe) path
// to the high-level spec path, via `lift_path_` over its full range.
val lift_path:
  #hsz:hash_size_t ->
  h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
  GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
  lift_path_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: the k-th lifted element is the
// representation of the k-th pointer. Registered as an SMT pattern so proofs
// about lifted paths get it automatically.
val lift_path_index_:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  k:nat{i <= k && k < j} ->
  Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
                 S.index (lift_path_ h hs i j) (k - i)))
        (decreases j)
        [SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
  // Induction on j: either the range is empty, k is the last element
  // (definitional), or recurse on the shorter prefix.
  if i = j then ()
  else if k = j - 1 then ()
  else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Corollary of `lift_path_index_` for whole paths: indexing the lifted path
// agrees with reading the i-th stored hash pointer.
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                  Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// If two pointer sequences agree on the slice [i, j), their lifted paths on
// that slice are equal. Proved by spelling out the pointwise facts (via the
// `lift_path_index_` SMT pattern) in both "offset" and "zero-based" forms
// so Z3 can connect them.
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Framing lemma: hash validity/region facts over a slice [i, j) survive a
// modification `dl` that is disjoint from everything under the tree region
// `mtr`. Induction on j, peeling one hash at a time via `Rgl?.r_sep`.
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// Framing lemma for whole paths: `path_safe` is preserved by any modification
// disjoint from both the path's footprint and the tree region.
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
  // The path pointer and its vector sit inside `path_loc p`, hence untouched;
  // the stored hashes are handled by the slice-level lemma.
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for empty paths: no stored hashes, so disjointness from the
// tree region is not required — only the path's own footprint must be framed.
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Framing lemma for the lifted representation of a slice: if `dl` is disjoint
// from the tree region, the lifted path is unchanged. The ensures clause
// first invokes `path_safe_preserved_` so that `lift_path_` is well-defined
// in the new state `h1`.
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                              HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Framing lemma for whole paths at the representation level: under the same
// disjointness hypotheses as `path_safe_preserved`, the hash size and the
// lifted path are both unchanged between h0 and h1.
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
// Allocate a fresh, empty path in region `r` (disjoint from the tree region
// `mtr`). Its hash vector lives in a new sub-region of `r`, so
// `HH.extends (V.frameOf ...) (B.frameOf p)` in `path_safe` holds.
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
   (requires (fun h0 -> HH.disjoint mtr r))
   (ensures (fun h0 p h1 ->
     // memory safety
     path_safe h1 mtr p /\
     // correctness: the new path lifts to the empty spec path
     Path?.hash_size (B.get h1 p 0) = hsz /\
     S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Reset a path to length zero. `V.clear` only resets the logical size; the
// underlying storage is kept, and the hash size is preserved.
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
   (requires (fun h0 -> path_safe h0 mtr p))
   (ensures (fun h0 _ h1 ->
     // memory safety
     path_safe h1 mtr p /\
     // correctness
     V.size_of (phashes h1 p) = 0ul /\
     S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Free a path: first the hash vector, then the path pointer itself.
// Note: only the vector is freed, not the hashes it points to — those are
// owned by the Merkle tree.
val free_path:
  p:path_p ->
  HST.ST unit
   (requires (fun h0 ->
     B.live h0 p /\ B.freeable p /\
     V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
     HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
   (ensures (fun h0 _ h1 ->
     modifies (path_loc p) h0 h1))
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Build the "rightmost hashes" vector `rhs` for levels >= `lv`, folding the
// running root into `acc`. `actd` records whether `acc` currently holds a
// meaningful accumulated hash. Mirrors `MTH.construct_rhs` exactly (see the
// correctness clause in the ensures). Terminates because `j` halves at each
// recursive call.
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
     HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
     mt_safe_elts #hsz h0 lv hs i j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.loc_rvector rhs)
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 rhs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs i j;
     MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
       (U32.v lv)
       (Rgl?.r_repr (hvvreg hsz) h0 hs)
       (Rgl?.r_repr (hvreg hsz) h0 rhs)
       (U32.v i) (U32.v j)
       (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
     (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
     )))
   (decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  // Base case: no elements at this level; nothing changes.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    // Even case: nothing to record at this level; recurse one level up.
    (if j % 2ul = 0ul
    then begin
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    // Odd case: the dangling rightmost element at this level is folded into
    // `acc` (and, if `actd`, the previous `acc` is stored into `rhs.[lv]`).
    else begin
      if actd
      then begin
        // `acc` already holds a live hash: save it into `rhs.[lv]`, then
        // combine it with the rightmost element of this level.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                       (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                              (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // `acc` was inactive: initialize it with the rightmost element.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      // Summarize the effect of the odd-case step before recursing:
      // `hs` untouched, `rhs` updated only when `actd`, `acc` now active.
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                     (if actd
                     then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                (Rgl?.r_repr (hreg hsz) hh0 acc)
                     else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      mt_safe_elts_rec hh3 lv hs i j;
      // Recurse one level up with `actd = true`.
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_root`. Currently trivially true —
// kept as a named function so the API shape matches the other `_pre_nst`
// checks and can be strengthened without changing callers.
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper exposing `mt_get_root_pre_nst` over a const tree pointer.
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
   (requires (fun h0 ->
     let mt = CB.cast mt in
     MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Compute the Merkle root into `rt`. Fast path: if `rhs_ok`, the cached
// `mroot` is simply copied out. Slow path: rebuild the rightmost hashes via
// `construct_rhs`, cache the fresh root in `mroot`, and set `rhs_ok = true`.
// The spec clause equates the result with `MTH.mt_get_root`.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: the cached root is up to date; copy it out.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: (re)build the rightmost hashes, computing the root in `rt`.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root in `mroot`.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Record that `rhs`/`mroot` are now valid.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
inline_for_extraction
// Append the hash pointer `hp` (which must point into the tree region `mtr`)
// to path `p`. Only the pointer is copied, not the hash contents — the path
// shares storage with the tree.
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     path_safe h0 mtr p /\
     not (V.is_full (phashes h0 p)) /\
     Rgl?.r_inv (hreg hsz) h0 hp /\
     HH.disjoint mtr (B.frameOf p) /\
     HH.includes mtr (B.frameOf hp) /\
     Path?.hash_size (B.get h0 p 0) = hsz))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     // correctness: the lifted path grows by exactly `hp`'s representation
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
       hsz = hsz0 /\ hsz = hsz1 /\
       (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
       S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // `V.insert` may reallocate; the old elements' lifted values are framed
  // because the insertion touches only the vector's own region.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Store the (possibly reallocated) vector back into the path record.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                 (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                   0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
// One step of the path-length computation: how many hashes (0 or 1) this
// level contributes, for target index `k` among `j` elements, with `actd`
// indicating whether a rightmost accumulator hash exists. Refinement ties
// the result to the high-level `MTH.mt_path_length_step`.
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
private inline_for_extraction
// Total Merkle path length from level `lv` upward: sum of per-level steps,
// halving `k` and `j` at each level. The bound `l <= 32ul - lv` follows from
// `j < pow2 (32 - lv)`. Matches `MTH.mt_path_length` by refinement.
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Return the current number of hashes stored in path `p`.
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
private inline_for_extraction
// One step of path construction at level `lv`: push the sibling of index `k`
// onto `p`. For odd `k` the sibling is on the left (`k - 1`); for even `k`
// it is on the right (`k + 1`), falling back to `rhs.[lv]` when `k + 1 = j`
// and an accumulator hash exists, or pushing nothing at the boundary.
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
     V.size_of (phashes h1 p) <= lv + 2ul /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
             (MTH.mt_make_path_step
               (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
               (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // Odd index: sibling is the left neighbor, stored in `hs.[lv]`.
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // Even index: sibling is on the right (or in `rhs` / absent at the edge).
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_path_step`: the requested index
// must be within the path's current length.
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper over `mt_get_path_step_pre_nst` for a const path pointer.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
        Path?.hash_size pv = Ghost.reveal hsz /\
        live h0 (Path?.hashes pv) /\
        mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Read the i-th hash pointer out of path `p`. The returned pointer aliases
// storage owned by the tree (or the path), so callers must not free it.
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
        Path?.hash_size pv = Ghost.reveal hsz /\
        live h0 (Path?.hashes pv) /\
        i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
// Main path-construction loop: starting from level `lv`, repeatedly push the
// sibling of `k` (via `mt_make_path_step`) and recurse one level up with
// `k / 2` and `j / 2`, until the level is empty. The ensures clause equates
// the result with the high-level `MTH.mt_get_path_`.
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
   (requires (fun h0 ->
     HH.includes mtr (V.frameOf hs) /\
     HH.includes mtr (V.frameOf rhs) /\
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     mt_safe_elts h0 lv hs i j /\
     path_safe h0 mtr p /\
     Path?.hash_size (B.get h0 p 0) = hsz /\
     V.size_of (phashes h0 p) <= lv + 1ul))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (path_loc p) h0 h1 /\
     path_safe h1 mtr p /\
     V.size_of (phashes h1 p) ==
     V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
     // correctness
     (mt_safe_elts_spec h0 lv hs i j;
     (let hsz0 = Path?.hash_size (B.get h0 p 0) in
      let hsz1 = Path?.hash_size (B.get h1 p 0) in
      let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
      let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
      hsz = hsz0 /\ hsz = hsz1 /\
      S.equal after
             (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
               (U32.v i) (U32.v j) (U32.v k) before actd)))))
   (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (// Push this level's sibling hash, then frame the tree state (only the
     // path footprint was modified) before recursing one level up.
     mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                   (MTH.mt_make_path_step
                     (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                     (U32.v i) (U32.v j) (U32.v k)
                     (lift_path hh0 mtr p) actd));
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                   (MTH.mt_get_path_ (U32.v lv + 1)
                     (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                     (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                     (lift_path hh1 mtr p)
                     (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                   (MTH.mt_get_path_ (U32.v lv)
                     (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                     (U32.v i) (U32.v j) (U32.v k)
                     (lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_path`: the offset is connectable to
// the tree's offset, hash sizes agree, the (offset-split) index lies in the
// tree's live window [i, j), and the output path starts empty.
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:const_path_p ->
root:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let dmt = B.get h0 mt 0 in
let dp = B.get h0 p 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
Path?.hash_size dp = (Ghost.reveal hsz) /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
l1:loc -> l2:loc ->
Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j
#pop-options
/// Flushing
private val
mt_flush_to_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i))
#restart-solver | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 1500,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_flush_to_:
hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
pi:index_t ->
i:index_t{i >= pi} ->
j:Ghost.erased index_t{
Ghost.reveal j >= i &&
U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1 /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
// correctness
(mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
S.equal (RV.as_seq h1 hs)
(MTH.mt_flush_to_
(U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
(U32.v i) (U32.v (Ghost.reveal j))))))
(decreases (U32.v i)) | [
"recursion"
] | MerkleTree.Low.mt_flush_to_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
hsz: MerkleTree.Low.Datastructures.hash_size_t ->
lv: LowStar.Vector.uint32_t{lv < MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
pi: MerkleTree.Low.index_t ->
i: MerkleTree.Low.index_t{i >= pi} ->
j:
FStar.Ghost.erased MerkleTree.Low.index_t
{ FStar.Ghost.reveal j >= i &&
FStar.UInt32.v (FStar.Ghost.reveal j) < Prims.pow2 (32 - FStar.UInt32.v lv) }
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 5,
"end_line": 2403,
"start_col": 39,
"start_line": 2229
} |
FStar.HyperStack.ST.ST | val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_get_path #hsz mt idx p root =
let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe p
(B.loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
hh0 hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert
(lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
// memory safety
mt_get_path_loc_union_helper
(loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union
(loc_union
(mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
hh0 hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) ==
1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
// correctness
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
(U32.v i) (U32.v j) (U32.v idx)
(lift_path hh2 mtframe p) false));
assert (MTH.mt_get_path
(mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)),
lift_path hh3 mtframe p,
Rgl?.r_repr (hreg hsz) hh3 root));
j | val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root)))
let mt_get_path #hsz mt idx p root = | true | null | false | let ncmt = CB.cast mt in
let mtframe = B.frameOf ncmt in
let hh0 = HST.get () in
mt_get_root mt root;
let mtv = !*ncmt in
let hsz = MT?.hash_size mtv in
let hh1 = HST.get () in
path_safe_init_preserved mtframe
p
(B.loc_union (mt_loc ncmt) (B.loc_all_regions_from false (B.frameOf root)))
hh0
hh1;
assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
let idx = split_offset (MT?.offset mtv) idx in
let i = MT?.i mtv in
let ofs = offset_of (MT?.i mtv) in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
assert (mt_safe_elts hh1 0ul hs i j);
assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
assert (idx < j);
hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
let ih = V.index (V.index hs 0ul) (idx - ofs) in
mt_path_insert #hsz mtframe p ih;
let hh2 = HST.get () in
assert (S.equal (lift_path hh2 mtframe p)
(MTH.path_insert (lift_path hh1 mtframe p)
(S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
mt_safe_preserved ncmt (path_loc p) hh1 hh2;
mt_preserved ncmt (path_loc p) hh1 hh2;
assert (V.size_of (phashes hh2 p) == 1ul);
mt_get_path_ 0ul mtframe hs rhs i j idx p false;
let hh3 = HST.get () in
mt_get_path_loc_union_helper (loc_union (mt_loc ncmt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p);
Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
mt_safe_preserved ncmt (path_loc p) hh2 hh3;
mt_preserved ncmt (path_loc p) hh2 hh3;
assert (V.size_of (phashes hh3 p) == 1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
assert (S.length (lift_path #hsz hh3 mtframe p) ==
S.length (lift_path #hsz hh2 mtframe p) +
MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
assert (modifies (loc_union (loc_union (mt_loc ncmt) (B.loc_all_regions_from false (B.frameOf root))
)
(path_loc p))
hh0
hh3);
assert (mt_safe hh3 ncmt);
assert (path_safe hh3 mtframe p);
assert (Rgl?.r_inv (hreg hsz) hh3 root);
assert (V.size_of (phashes hh3 p) == 1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
mt_safe_elts_spec hh2 0ul hs i j;
assert (S.equal (lift_path hh3 mtframe p)
(MTH.mt_get_path_ 0
(RV.as_seq hh2 hs)
(RV.as_seq hh2 rhs)
(U32.v i)
(U32.v j)
(U32.v idx)
(lift_path hh2 mtframe p)
false));
assert (MTH.mt_get_path (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
(U32.v (MT?.j (B.get hh3 ncmt 0)), lift_path hh3 mtframe p, Rgl?.r_repr (hreg hsz) hh3 root));
j | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Low.const_mt_p",
"MerkleTree.Low.offset_t",
"MerkleTree.Low.path_p",
"MerkleTree.Low.Datastructures.hash",
"FStar.Ghost.reveal",
"Prims.unit",
"Prims._assert",
"Prims.eq2",
"FStar.Pervasives.Native.tuple3",
"Prims.nat",
"MerkleTree.New.High.path",
"FStar.UInt32.v",
"MerkleTree.Low.__proj__MT__item__hash_size",
"LowStar.Monotonic.Buffer.get",
"MerkleTree.Low.merkle_tree",
"LowStar.Buffer.trivial_preorder",
"Prims.b2t",
"Prims.op_Equality",
"Prims.int",
"FStar.Seq.Base.length",
"MerkleTree.New.High.hash",
"Prims.op_Addition",
"MerkleTree.New.High.mt_path_length",
"MerkleTree.New.High.__proj__MT__item__j",
"MerkleTree.Low.mt_lift",
"MerkleTree.New.High.mt_get_path",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Pervasives.Native.Mktuple3",
"MerkleTree.Low.__proj__MT__item__j",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"MerkleTree.Low.lift_path",
"FStar.Seq.Base.equal",
"MerkleTree.New.High.mt_get_path_",
"LowStar.RVector.as_seq",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.mt_safe_elts_spec",
"FStar.UInt32.__uint_to_t",
"FStar.UInt32.t",
"LowStar.Vector.size_of",
"MerkleTree.Low.__proj__Path__item__hash_size",
"MerkleTree.Low.path",
"MerkleTree.Low.phashes",
"FStar.Integers.op_Plus",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.mt_path_length",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"MerkleTree.Low.path_safe",
"MerkleTree.Low.mt_safe",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_union",
"MerkleTree.Low.mt_loc",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"MerkleTree.Low.path_loc",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"MerkleTree.Low.mt_preserved",
"MerkleTree.Low.mt_safe_preserved",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"MerkleTree.Low.mt_get_path_loc_union_helper",
"MerkleTree.Low.index_t",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.mt_get_path_",
"MerkleTree.New.High.path_insert",
"FStar.Seq.Base.index",
"LowStar.Regional.__proj__Rgl__item__repr",
"FStar.Integers.op_Subtraction",
"MerkleTree.Low.mt_path_insert",
"LowStar.Vector.index",
"LowStar.Vector.vector",
"MerkleTree.Low.Datastructures.hash_vv_as_seq_get_index",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_r_inv",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_includes",
"FStar.Integers.op_Less",
"LowStar.Vector.get",
"MerkleTree.Low.mt_safe_elts",
"LowStar.Vector.uint32_t",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.__proj__MT__item__rhs",
"MerkleTree.Low.Datastructures.hash_vv",
"MerkleTree.Low.__proj__MT__item__hs",
"Prims.l_and",
"FStar.UInt32.lte",
"MerkleTree.Low.__proj__MT__item__i",
"MerkleTree.Low.add64_fits",
"MerkleTree.Low.__proj__MT__item__offset",
"MerkleTree.Low.offset_of",
"MerkleTree.Low.split_offset",
"FStar.Seq.Base.empty",
"FStar.Pervasives.Native.tuple2",
"MerkleTree.New.High.merkle_tree",
"MerkleTree.New.High.mt_get_root",
"FStar.Pervasives.Native.Mktuple2",
"MerkleTree.Low.path_safe_init_preserved",
"LowStar.BufferOps.op_Bang_Star",
"MerkleTree.Low.mt_get_root",
"FStar.Monotonic.HyperHeap.rid",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` survives any modification disjoint from the
// vector-of-vectors structure. Registered as SMT patterns so the solver applies
// it automatically whenever a disjoint modification is in context.
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility: both rvectors and the root hash are valid, and
// levels `i..j` of `hs` hold exactly the live hashes.
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality: each component lives in its own sub-region of the tree's
// region, and the three sub-regions are pairwise disjoint so they can be
// modified and framed independently.
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing lemma for the whole-tree invariant: modifications disjoint from
// `mt_loc mt` preserve both the stored record and `mt_safe`. The proof works
// by showing `mt_loc` includes each component's footprint, then invoking the
// per-component preservation lemmas.
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
// Bridge lemma: the low-level element-safety invariant implies the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted sequence view.
// Proved by recursing over the remaining levels.
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lift of a low-level tree record to its high-level specification
// counterpart (`MTH.merkle_tree`), replacing each stateful component by its
// representation in memory `h`.
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
// Needed to establish the `mt_wf_elts` refinement on the result.
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
// Ghost lift of a tree *pointer*: dereference in `h` and lift the record.
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
// Representation-preservation lemma: a modification disjoint from the tree's
// footprint leaves the *lifted* (high-level) view unchanged, not just the
// invariant. Note the ensures-clause first re-establishes `mt_safe h1 mt` via
// `mt_safe_preserved` so `mt_lift h1 mt` is well-defined.
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
// Allocates the three components (`hs`, `rhs`, `mroot`) in fresh sub-regions
// of `r` (establishing the disjointness part of `mt_safe`), then allocates the
// tree record itself. The interleaved `*_preserved` calls carry each
// component's invariant across the later allocations.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
// Releases every component of the tree (both rvectors, the root hash, and the
// record buffer). After this call the pointer must not be used again.
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
// Sequence lemma: updating index `i` of an rvector's representation equals
// splicing the prefix [0, i), the new value, and the suffix (i, size).
// Used below to relate `RV.assign` to high-level sequence operations.
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// Two-phase operation: (1) build a grown copy `ihv` of `hs[lv]` with `v`
// appended; (2) assign it back into `hs` at `lv`. Each phase is followed by
// the disjointness/preservation lemmas needed to re-establish `rv_inv`,
// `mt_safe_elts`, and the sequence-level correctness equations. The statement
// order is proof-relevant; do not reorder.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
// Arithmetic helper for the even-`j` insertion branch: appending one element
// does not change the parent-level index (`j / 2 == (j + 1) / 2`).
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
// Arithmetic helper for the odd-`j` insertion branch: the parent-level index
// advances by one, it stays within bounds of level `lv + 1`, and the current
// level is non-empty (so `S.last` below is defined).
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
// Four-way reassociation of `loc_union`: (a ∪ b) ∪ (c ∪ d) == (a ∪ c) ∪ (b ∪ d).
// Pure plumbing for the footprint rewriting in `insert_modifies_rec_helper`.
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
// Footprint algebra for the recursive case of `insert_`: the union of the
// level-`lv` footprint and the recursive-call footprint (levels `lv+1`..top),
// each joined with the accumulator's location `aloc`, collapses to the single
// footprint claimed in `insert_`'s `modifies` postcondition.
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Split both the vector-slot footprint and the element footprint at `lv`.
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Weakening lemma: `modifies l1` can be enlarged to `modifies ((l1 ∪ l2) ∪ l3)`.
// Used in the even-`j` base case of `insert_` to match the recursive footprint.
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
// Sequence helper: in `snoc s v`, the element at the old last position is
// still `S.last s` (i.e., snoc does not disturb the previous last element).
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
// Projection: the full rvector invariant implies the regionality sub-invariant
// for any sub-range [i, j) of its elements.
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
//        BEFORE INSERTION          AFTER INSERTION
// lv
// 0      h0 h1 h2          ====>   h0 h1 h2 h3
// 1      h01                       h01 h23
// 2                                h03
//
// Implementation note: each level first pushes the accumulator via
// `hash_vv_insert_copy`. If `j` is odd, the level is now "complete": the two
// rightmost hashes are compressed into `acc` (in place) and the function
// recurses on level `lv + 1`; otherwise it stops. The numbered proof steps
// (1-x .. 5-x) re-establish `modifies`, `rv_inv`, `mt_safe_elts`, and the
// high-level correspondence with `MTH.insert_` after each state change; their
// order is proof-relevant.
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
// Non-stateful insertion precondition: the tree is not full and the global
// index (`offset + j + 1`) still fits in 64 bits.
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper of `mt_insert_pre_nst`: dereferences the (const) tree
// pointer and runs the pure check on the current record.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
// The body runs `insert_` on level 0, frames the untouched components (`rhs`,
// `mroot`) across the insertion footprint, writes back the record with `j`
// incremented and `rhs_ok` cleared, and finally frames every component across
// the record write.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Frame `rhs` and `mroot` across the footprint touched by `insert_`.
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// Frame every component across the record write (only `mt` itself changed).
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Implemented as: allocate an empty tree in region `r`, then insert `init`.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// A path is a hash size together with a vector of hash pointers (the sibling
// hashes along a root-to-leaf path).
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
// Mutable pointer to a path, and its read-only (const) counterpart.
type path_p = B.pointer path
type const_path_p = const_pointer path
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant: the pointer and its vector are
// live and freeable, every stored hash is valid and lives inside the tree's
// region `mtr`, and the path's own region is disjoint from `mtr`.
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Footprint of a path: all regions under the path pointer's region.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lift of the sub-sequence [i, j) of hash pointers to a high-level
// `MTH.path` (a sequence of hash values), by taking each pointer's
// representation in `h`. Structural recursion on `j`.
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path: lift the entire stored hash vector.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Pointwise characterization of `lift_path_`: element `k` of the input maps
// to index `k - i` of the lifted sequence. Registered as an SMT pattern so
// index facts about lifted paths are derived automatically.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
  h:HS.mem -> mtr:HH.rid ->
  p:path_p -> i:uint32_t ->
  Lemma (requires (path_safe h mtr p /\
                  i < V.size_of (phashes h p)))
        (ensures (let hsz = Path?.hash_size (B.get h p 0) in
                 Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
                 S.index (lift_path #(hsz) h mtr p) (U32.v i)))
// Corollary of lift_path_index_ for a whole path object: indexing a lifted
// path agrees with lifting the indexed low-level hash.
let lift_path_index h mtr p i =
  lift_path_index_ h (V.as_seq h (phashes h p))
    0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
  #hsz:hash_size_t ->
  h:HS.mem ->
  hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
  i:nat -> j:nat ->
  Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
                  S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
                  V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
                  V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
        (ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
// Lifting is extensional: two hash sequences that agree on [i, j) lift to
// equal high-level paths. The chain of asserts re-states pointwise equality in
// the index forms the SMT solver needs (offset by i and not), relying on the
// lift_path_index_ SMT pattern to relate lifted elements to representations.
let lift_path_eq #hsz h hs1 hs2 i j =
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs1 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
  assert (forall (k:nat{i <= k && k < j}).
           S.index (lift_path_ h hs2 i j) (k - i) ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs1 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (lift_path_ h hs2 i j) k ==
           Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
  assert (forall (k:nat{k < j - i}).
           S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
  assert (forall (k:nat{i <= k && k < j}).
           S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid -> hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma
    (requires (V.forall_seq hs i j
                (fun hp ->
                  Rgl?.r_inv (hreg hsz) h0 hp /\
                  HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
              loc_disjoint dl (B.loc_all_regions_from false mtr) /\
              modifies dl h0 h1))
    (ensures (V.forall_seq hs i j
               (fun hp ->
                 Rgl?.r_inv (hreg hsz) h1 hp /\
                 HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
    (decreases j)
// Framing for the per-hash part of path_safe: a modification disjoint from the
// whole tree region `mtr` preserves the invariant of every hash in hs[i, j),
// since each hash lives inside `mtr`. Induction on j; each step frames the
// last hash via its regional separation lemma (Rgl?.r_sep).
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (assert (loc_includes
                 (B.loc_all_regions_from false mtr)
                 (B.loc_all_regions_from false
                   (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
       path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p))
// Framing for path_safe: modifications disjoint from both the path's footprint
// and the tree region preserve path safety.
let path_safe_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_safe_preserved_
    mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  V.size_of (phashes h0 p) = 0ul /\
                  B.loc_disjoint dl (path_loc p) /\
                  modifies dl h0 h1))
        (ensures (path_safe h1 mtr p /\
                 V.size_of (phashes h1 p) = 0ul))
// Framing for an empty path: with no stored hashes there is nothing inside
// `mtr` to frame, so only disjointness from the path's own footprint is needed.
let path_safe_init_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
  #hsz:hash_size_t ->
  mtr:HH.rid ->
  hs:S.seq (hash #hsz) ->
  i:nat -> j:nat{i <= j && j <= S.length hs} ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (V.forall_seq hs i j
                    (fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
                               HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
                 S.equal (lift_path_ h0 hs i j)
                         (lift_path_ h1 hs i j)))
        (decreases j)
// Framing for the lifted representation: a modification disjoint from `mtr`
// does not change the high-level value of hs[i, j). Induction on j, framing
// the last hash's representation with Rgl?.r_sep at each step.
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
  if i = j then ()
  else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
       path_preserved_ mtr hs i (j - 1) dl h0 h1;
       assert (loc_includes
                (B.loc_all_regions_from false mtr)
                (B.loc_all_regions_from false
                  (Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
       Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
  mtr:HH.rid -> p:path_p ->
  dl:loc -> h0:HS.mem -> h1:HS.mem ->
  Lemma (requires (path_safe h0 mtr p /\
                  loc_disjoint dl (path_loc p) /\
                  loc_disjoint dl (B.loc_all_regions_from false mtr) /\
                  modifies dl h0 h1))
        (ensures (path_safe_preserved mtr p dl h0 h1;
                 let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
                 let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
                 let b:MTH.path = lift_path #hsz0 h0 mtr p in
                 let a:MTH.path = lift_path #hsz1 h1 mtr p in
                 hsz0 = hsz1 /\ S.equal b a))
// Framing for a whole path object: a modification disjoint from both the path
// and the tree region leaves the path's high-level representation unchanged.
let path_preserved mtr p dl h0 h1 =
  assert (loc_includes (path_loc p) (B.loc_buffer p));
  assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
  path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
    0 (S.length (V.as_seq h0 (phashes h0 p)))
    dl h0 h1
val init_path:
  hsz:hash_size_t ->
  mtr:HH.rid -> r:HST.erid ->
  HST.ST path_p
    (requires (fun h0 -> HH.disjoint mtr r))
    (ensures (fun h0 p h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      Path?.hash_size (B.get h1 p 0) = hsz /\
      S.equal (lift_path #hsz h1 mtr p) S.empty))
// Allocate a fresh, empty path in region `r` (disjoint from the tree region
// `mtr`): the backing hash vector is allocated in a new sub-region of `r`.
let init_path hsz mtr r =
  let nrid = HST.new_region r in
  (B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
  mtr:HH.rid -> p:path_p ->
  HST.ST unit
    (requires (fun h0 -> path_safe h0 mtr p))
    (ensures (fun h0 _ h1 ->
      // memory safety
      path_safe h1 mtr p /\
      // correctness
      V.size_of (phashes h1 p) = 0ul /\
      S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
// Reset a path to empty. Note: V.clear only resets the vector's size; the
// stored hashes are pointers into the tree, so nothing is freed here.
let clear_path mtr p =
  let pv = !*p in
  p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
  p:path_p ->
  HST.ST unit
    (requires (fun h0 ->
      B.live h0 p /\ B.freeable p /\
      V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
      HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      modifies (path_loc p) h0 h1))
// Free a path: releases the hash-pointer vector and the path pointer itself.
// The hashes themselves belong to the tree and are not freed.
let free_path p =
  let pv = !*p in
  V.free (Path?.hashes pv);
  B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
  #hsz:hash_size_t ->
  #hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
  acc:hash #hsz ->
  actd:bool ->
  hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
  HST.ST unit
   (requires (fun h0 ->
     RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
     HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
     Rgl?.r_inv (hreg hsz) h0 acc /\
     HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
     HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
     mt_safe_elts #hsz h0 lv hs i j))
   (ensures (fun h0 _ h1 ->
     // memory safety
     modifies (loc_union
                (RV.loc_rvector rhs)
                (B.loc_all_regions_from false (B.frameOf acc)))
              h0 h1 /\
     RV.rv_inv h1 rhs /\
     Rgl?.r_inv (hreg hsz) h1 acc /\
     // correctness
     (mt_safe_elts_spec #hsz h0 lv hs i j;
     MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
       (U32.v lv)
       (Rgl?.r_repr (hvvreg hsz) h0 hs)
       (Rgl?.r_repr (hvreg hsz) h0 rhs)
       (U32.v i) (U32.v j)
       (Rgl?.r_repr (hreg hsz) h0 acc) actd ==
     (Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
     )))
   (decreases (U32.v j))
// Build the "rightmost hashes" (rhs) of an incomplete Merkle tree, level by
// level, accumulating the Merkle root into `acc`. `actd` records whether `acc`
// currently holds an active partial root. Recursion climbs the levels (lv+1)
// with halved indices; correctness is proved against the high-level
// MTH.construct_rhs spec. Interleaved lemma calls are framing/spec hints for
// the SMT solver; they do not affect extracted code.
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
  let hh0 = HST.get () in
  // Base case: empty level range; the spec is a no-op.
  if j = 0ul then begin
    assert (RV.rv_inv hh0 hs);
    assert (mt_safe_elts #hsz hh0 lv hs i j);
    mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
    assert (MTH.hs_wf_elts #(U32.v hsz)
             (U32.v lv) (RV.as_seq hh0 hs)
             (U32.v i) (U32.v j));
    let hh1 = HST.get() in
    assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
             (U32.v lv)
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
  end
  else
    let ofs = offset_of i in
    begin
    // Even number of elements at this level: nothing to record; recurse up.
    (if j % 2ul = 0ul
    then begin
      Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
      mt_safe_elts_rec #hsz hh0 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
      let hh1 = HST.get () in
      // correctness
      mt_safe_elts_spec #hsz hh0 lv hs i j;
      MTH.construct_rhs_even #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc)
               actd ==
             (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
    end
    // Odd number of elements: the last element is a rightmost hash.
    else begin
      if actd
      then begin
        // Active accumulator: save it into rhs[lv], then hash it with the
        // last element of this level into acc.
        RV.assign_copy (hcpy hsz) rhs lv acc;
        let hh1 = HST.get () in
        // memory safety
        Rgl?.r_sep (hreg hsz) acc
          (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        RV.rv_inv_preserved
          (V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (V.frameOf rhs))
          hh0 hh1;
        mt_safe_elts_head hh1 lv hs i j;
        hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
        // correctness
        assert (S.equal (RV.as_seq hh1 rhs)
                       (S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                              (Rgl?.r_repr (hreg hsz) hh0 acc)));
        hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
        let hh2 = HST.get () in
        // memory safety
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.rv_inv_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        RV.as_seq_preserved
          rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
               (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                 (U32.v j - 1 - U32.v ofs))
                                        (Rgl?.r_repr (hreg hsz) hh0 acc))
      end
      else begin
        // No active accumulator yet: seed acc with the last element.
        mt_safe_elts_head hh0 lv hs i j;
        hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
        hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
        Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
        let hh1 = HST.get () in
        // memory safety
        V.loc_vector_within_included hs lv (V.size_of hs);
        mt_safe_elts_preserved lv hs i j
          (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.rv_inv_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        RV.as_seq_preserved
          rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
        // correctness
        hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
        assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
               S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                       (U32.v j - 1 - U32.v ofs))
      end;
      let hh3 = HST.get () in
      assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
      assert (S.equal (RV.as_seq hh3 rhs)
                     (if actd
                     then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
                                (Rgl?.r_repr (hreg hsz) hh0 acc)
                     else RV.as_seq hh0 rhs));
      assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
             (if actd
             then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                                                    (U32.v j - 1 - U32.v ofs))
                                           (Rgl?.r_repr (hreg hsz) hh0 acc)
             else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
                          (U32.v j - 1 - U32.v ofs)));
      // Recurse on the next level; the accumulator is now active.
      mt_safe_elts_rec hh3 lv hs i j;
      construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
      let hh4 = HST.get () in
      mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv + 1)
               (Rgl?.r_repr (hvvreg hsz) hh3 hs)
               (Rgl?.r_repr (hvreg hsz) hh3 rhs)
               (U32.v i / 2) (U32.v j / 2)
               (Rgl?.r_repr (hreg hsz) hh3 acc) true ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
      mt_safe_elts_spec hh0 lv hs i j;
      MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
        (U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
        (U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
      assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
               (U32.v lv)
               (Rgl?.r_repr (hvvreg hsz) hh0 hs)
               (Rgl?.r_repr (hvreg hsz) hh0 rhs)
               (U32.v i) (U32.v j)
               (Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
             (Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
    end)
    end
#pop-options
private inline_for_extraction
// Runtime precondition check for mt_get_root; currently trivially true
// (kept for interface uniformity with the other *_pre_nst checks).
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST bool
   (requires (fun h0 ->
     let mt = CB.cast mt in
     MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun _ _ _ -> True))
// Public (stateful) wrapper around mt_get_root_pre_nst: dereferences the
// const tree pointer and runs the non-stateful check.
let mt_get_root_pre #hsz mt rt =
  let mt = CB.cast mt in
  let mt = !*mt in
  let hsz = MT?.hash_size mt in
  assert (MT?.hash_size mt = hsz);
  mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  rt:hash #hsz ->
  HST.ST unit
   (requires (fun h0 ->
     let mt = CB.cast mt in
     let dmt = B.get h0 mt 0 in
     MT?.hash_size dmt = (Ghost.reveal hsz) /\
     mt_get_root_pre_nst dmt rt /\
     mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
     HH.disjoint (B.frameOf mt) (B.frameOf rt)))
   (ensures (fun h0 _ h1 ->
     let mt = CB.cast mt in
     // memory safety
     modifies (loc_union
                (mt_loc mt)
                (B.loc_all_regions_from false (B.frameOf rt)))
              h0 h1 /\
     mt_safe h1 mt /\
     (let mtv0 = B.get h0 mt 0 in
     let mtv1 = B.get h1 mt 0 in
     MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
     MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
     MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
     MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
     MT?.offset mtv1 == MT?.offset mtv0 /\
     MT?.rhs_ok mtv1 = true /\
     Rgl?.r_inv (hreg hsz) h1 rt /\
     // correctness
     MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
     (mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
// Compute (or fetch) the Merkle root into `rt`. If the cached rightmost
// hashes are up to date (rhs_ok), the stored root is simply copied out;
// otherwise construct_rhs rebuilds them, the fresh root is cached in the
// tree (rhs_ok set to true), and copied into `rt`. The lemma calls frame
// the tree invariant across each heap update for the SMT solver.
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
  let mt = CB.cast mt in
  let hh0 = HST.get () in
  let mtv = !*mt in
  let prefix = MT?.offset mtv in
  let i = MT?.i mtv in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  let mroot = MT?.mroot mtv in
  let hash_size = MT?.hash_size mtv in
  let hash_spec = MT?.hash_spec mtv in
  let hash_fun = MT?.hash_fun mtv in
  if MT?.rhs_ok mtv
  then begin
    // Fast path: cached root is valid; copy it out.
    Cpy?.copy (hcpy hash_size) hash_size mroot rt;
    let hh1 = HST.get () in
    mt_safe_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    mt_preserved mt
      (B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
    MTH.mt_get_root_rhs_ok_true
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
  end
  else begin
    // Slow path: rebuild rhs and the root from the stored hashes.
    construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
    let hh1 = HST.get () in
    // memory safety
    assert (RV.rv_inv hh1 rhs);
    assert (Rgl?.r_inv (hreg hsz) hh1 rt);
    assert (B.live hh1 mt);
    RV.rv_inv_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    RV.as_seq_preserved
      hs (loc_union
           (RV.loc_rvector rhs)
           (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    V.loc_vector_within_included hs 0ul (V.size_of hs);
    mt_safe_elts_preserved 0ul hs i j
      (loc_union
        (RV.loc_rvector rhs)
        (B.loc_all_regions_from false (B.frameOf rt)))
      hh0 hh1;
    // correctness
    mt_safe_elts_spec hh0 0ul hs i j;
    assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
             (Rgl?.r_repr (hvvreg hsz) hh0 hs)
             (Rgl?.r_repr (hvreg hsz) hh0 rhs)
             (U32.v i) (U32.v j)
             (Rgl?.r_repr (hreg hsz) hh0 rt) false ==
           (Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
    // Cache the freshly computed root inside the tree.
    Cpy?.copy (hcpy hash_size) hash_size rt mroot;
    let hh2 = HST.get () in
    // memory safety
    RV.rv_inv_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.rv_inv_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      hs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    RV.as_seq_preserved
      rhs (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    B.modifies_buffer_elim
      rt (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_all_regions_from false (B.frameOf mroot))
      hh1 hh2;
    // correctness
    assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
    // Mark rhs as valid in the tree record.
    mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
    let hh3 = HST.get () in
    // memory safety
    Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
    RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
    Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
    mt_safe_elts_preserved 0ul hs i j
      (B.loc_buffer mt) hh2 hh3;
    assert (mt_safe hh3 mt);
    // correctness
    MTH.mt_get_root_rhs_ok_false
      (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (MTH.MT #(U32.v hash_size)
             (U32.v i) (U32.v j)
             (RV.as_seq hh0 hs)
             true
             (RV.as_seq hh1 rhs)
             (Rgl?.r_repr (hreg hsz) hh1 rt)
             hash_spec,
           Rgl?.r_repr (hreg hsz) hh1 rt));
    assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
           (mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
  end
#pop-options
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
      (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
       hsz = hsz0 /\ hsz = hsz1 /\
       (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
       S.equal hspec after)))))
// Append the hash pointer `hp` (which lives inside the tree region `mtr`)
// to the path. Only the pointer is stored; no hash data is copied. The
// lemma calls re-establish path_safe and the lifted-path equality across
// the vector insert and the pointer update.
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                 (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                   0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Number of path hashes (0 or 1) contributed by one tree level, for target
// index `k`, level width `j`, and accumulator flag `actd`. A left sibling
// always contributes; a right sibling contributes unless the target is the
// last element (or second-to-last with no active rightmost hash).
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
// Total Merkle-path length from level `lv` upward: sum of per-level steps,
// halving k and j at each level. An odd level width activates the
// rightmost-hash flag for all higher levels. Matches MTH.mt_path_length.
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
// Number of hashes currently stored in the (read-only) path.
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
      V.size_of (phashes h1 p) <= lv + 2ul /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
              (MTH.mt_make_path_step
                (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
                (U32.v i) (U32.v j) (U32.v k) before actd)))))
// Add at most one sibling hash for level `lv` to the path:
// - k odd: the left sibling hs[lv][k-1-ofs];
// - k even and a right sibling exists: hs[lv][k+1-ofs], or rhs[lv] when the
//   sibling position is the (active) rightmost hash;
// - otherwise nothing. Mirrors MTH.mt_make_path_step.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
// Pure bounds check for mt_get_path_step: `i` must index into the path.
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
// Stateful wrapper: dereference the const path and run the pure bounds check.
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
// Return the i-th hash pointer of the path (no copy; pointer into the tree).
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) ==
      V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
              (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
                (U32.v i) (U32.v j) (U32.v k) before actd)))))
    (decreases (32 - U32.v lv))
// Recursive worker of mt_get_path: at each level, add the sibling step via
// mt_make_path_step, then climb to level lv+1 with halved indices. The flag
// turns true at the first odd level (a rightmost hash becomes active).
// Framing lemmas between the two calls transport the invariants across the
// path mutation; asserts align each state with the MTH spec.
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                   (MTH.mt_make_path_step
                     (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                     (U32.v i) (U32.v j) (U32.v k)
                     (lift_path hh0 mtr p) actd));
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                   (MTH.mt_get_path_ (U32.v lv + 1)
                     (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                     (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                     (lift_path hh1 mtr p)
                     (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                   (MTH.mt_get_path_ (U32.v lv)
                     (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                     (U32.v i) (U32.v j) (U32.v k)
                     (lift_path hh0 mtr p) actd)))
#pop-options
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
// Pure precondition for mt_get_path: the 64-bit index must be addressable
// from the tree's offset, hash sizes must agree, the (32-bit) split index
// must lie within the tree's element range, and the output path must be empty.
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
// Stateful wrapper: dereference the const tree/path and run the pure check.
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
// Trivial loc-algebra fact (idempotence of union); used to normalize the
// modifies clause in mt_get_path. Discharged automatically by SMT.
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
// Specification of mt_get_path (implementation follows): builds the Merkle
// path for element `idx` into `p` (pointers into the tree, no copies),
// copies the root into `root`, and returns the tree's element count `j`.
// The postcondition ties the result to the high-level MTH.mt_get_path.
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      let idx = split_offset (MT?.offset mtv0) idx in
      MT?.hash_size mtv0 = Ghost.reveal hsz /\
      MT?.hash_size mtv1 = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
      // memory safety
      modifies (loc_union
                 (loc_union
                   (mt_loc mt)
                   (B.loc_all_regions_from false (B.frameOf root)))
                 (path_loc p))
               h0 h1 /\
      mt_safe h1 mt /\
      path_safe h1 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h1 root /\
      V.size_of (phashes h1 p) ==
      1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
      // correctness
      (let sj, sp, srt =
        MTH.mt_get_path
          (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
      sj == U32.v (MT?.j mtv1) /\
      S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
      srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 300,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_get_path:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
idx:offset_t ->
p:path_p ->
root:hash #hsz ->
HST.ST index_t
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
mt_safe h0 mt /\
path_safe h0 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h0 root /\
HH.disjoint (B.frameOf root) (B.frameOf mt) /\
HH.disjoint (B.frameOf root) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let idx = split_offset (MT?.offset mtv0) idx in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
// memory safety
modifies (loc_union
(loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf root)))
(path_loc p))
h0 h1 /\
mt_safe h1 mt /\
path_safe h1 (B.frameOf mt) p /\
Rgl?.r_inv (hreg hsz) h1 root /\
V.size_of (phashes h1 p) ==
1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
// correctness
(let sj, sp, srt =
MTH.mt_get_path
(mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
sj == U32.v (MT?.j mtv1) /\
S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
srt == Rgl?.r_repr (hreg hsz) h1 root))) | [] | MerkleTree.Low.mt_get_path | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mt: MerkleTree.Low.const_mt_p ->
idx: MerkleTree.Low.offset_t ->
p: MerkleTree.Low.path_p ->
root: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST MerkleTree.Low.index_t | {
"end_col": 3,
"end_line": 2161,
"start_col": 36,
"start_line": 2075
} |
FStar.HyperStack.ST.ST | val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) | val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun = | true | null | false | let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then
(insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) == S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies (loc_union (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0
hh2);
RV.rv_inv_preserved hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved hs
(lv + 1ul)
(V.size_of hs)
(B.loc_region_only false (B.frameOf acc))
hh1
hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
V.get_preserved hs lv (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
mt_safe_elts_preserved (lv + 1ul)
hs
(Ghost.reveal i / 2ul)
(j / 2ul)
(B.loc_region_only false (B.frameOf acc))
hh1
hh2;
insert_snoc_last_helper (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc)
((Ghost.reveal hash_spec) (S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
insert_ (lv + 1ul) (Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul) hs acc hash_fun;
let hh3 = HST.get () in
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union (loc_union (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2
hh3);
assert (modifies (loc_union (loc_union (loc_union (RV.rs_loc_elem (hvreg hsz)
(V.as_seq hh0 hs)
(U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union (loc_union (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0
hh3);
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint (V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint (V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs
lv
(loc_union (loc_union (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2
hh3;
assert (V.size_of (V.get hh3 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh3 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul + 1ul));
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv + 1)
(U32.v (Ghost.reveal i) / 2)
(U32.v j / 2)
(RV.as_seq hh2 hs)
(Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv)
(U32.v (Ghost.reveal i))
(U32.v j)
(RV.as_seq hh0 hs)
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv)
(U32.v (Ghost.reveal i))
(U32.v j)
(RV.as_seq hh0 hs)
(Rgl?.r_repr (hreg hsz) hh0 acc))))
else
(insert_index_helper_even lv j;
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies (loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0
hh1);
insert_modifies_union_loc_weakening (loc_union (RV.rs_loc_elem (hvreg hsz)
(V.as_seq hh0 hs)
(U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union (loc_union (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0
hh1;
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv)
(U32.v (Ghost.reveal i))
(U32.v j)
(RV.as_seq hh0 hs)
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv)
(U32.v (Ghost.reveal i))
(U32.v j)
(RV.as_seq hh0 hs)
(Rgl?.r_repr (hreg hsz) hh0 acc))));
let hh4 = HST.get () in
assert (modifies (loc_union (loc_union (loc_union (RV.rs_loc_elem (hvreg hsz)
(V.as_seq hh0 hs)
(U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union (loc_union (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0
hh4);
insert_modifies_rec_helper lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz)
#hash_spec
(U32.v lv)
(U32.v (Ghost.reveal i))
(U32.v j)
(RV.as_seq hh0 hs)
(Rgl?.r_repr (hreg hsz) hh0 acc))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Ghost.erased",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.index_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less_Equals",
"FStar.Ghost.reveal",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.Integers.op_Subtraction",
"Prims.pow2",
"MerkleTree.Low.uint32_32_max",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"Prims._assert",
"FStar.Seq.Base.equal",
"LowStar.Regional.__proj__Rgl__item__repr",
"MerkleTree.Low.Datastructures.hvreg",
"LowStar.RVector.as_seq",
"MerkleTree.New.High.insert_",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.Low.Datastructures.hreg",
"Prims.unit",
"MerkleTree.Low.mt_safe_elts_spec",
"LowStar.Regional.__proj__Rgl__item__r_inv",
"LowStar.RVector.rv_inv",
"MerkleTree.Low.mt_safe_elts",
"FStar.Integers.op_Plus",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.insert_modifies_rec_helper",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"LowStar.Monotonic.Buffer.modifies",
"LowStar.Monotonic.Buffer.loc_union",
"LowStar.RVector.rs_loc_elem",
"LowStar.Vector.as_seq",
"LowStar.Vector.loc_vector_within",
"LowStar.RVector.rv_loc_elems",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"FStar.UInt32.t",
"FStar.Integers.op_Percent",
"MerkleTree.New.High.insert_rec",
"FStar.Integers.op_Slash",
"MerkleTree.Low.mt_safe_elts_constr",
"Prims.eq2",
"LowStar.Vector.get",
"MerkleTree.Low.offset_of",
"LowStar.Vector.get_preserved",
"LowStar.Monotonic.Buffer.loc_disjoint",
"LowStar.RVector.rv_loc_elems_included",
"MerkleTree.Low.rv_inv_rv_elems_reg",
"MerkleTree.Low.insert_",
"FStar.Ghost.hide",
"FStar.Seq.Properties.last",
"MerkleTree.New.High.hash",
"FStar.Seq.Base.index",
"MerkleTree.Low.insert_snoc_last_helper",
"MerkleTree.Low.mt_safe_elts_preserved",
"LowStar.Monotonic.Buffer.loc_region_only",
"LowStar.RVector.rv_loc_elems_preserved",
"LowStar.RVector.as_seq_preserved",
"LowStar.RVector.rv_inv_preserved",
"LowStar.Vector.index",
"MerkleTree.Low.Datastructures.hash_vec_rv_inv_r_inv",
"FStar.Integers.op_Greater",
"Prims.int",
"FStar.Seq.Base.length",
"MerkleTree.Low.insert_index_helper_odd",
"Prims.bool",
"MerkleTree.New.High.insert_base",
"MerkleTree.Low.insert_modifies_union_loc_weakening",
"MerkleTree.Low.insert_index_helper_even",
"LowStar.Vector.loc_vector_within_disjoint",
"LowStar.Vector.loc_vector_within_included",
"MerkleTree.Low.hash_vv_insert_copy"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 800,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j)) | [
"recursion"
] | MerkleTree.Low.insert_ | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv < MerkleTree.Low.merkle_tree_size_lg} ->
i: FStar.Ghost.erased MerkleTree.Low.index_t ->
j:
MerkleTree.Low.index_t
{ FStar.Ghost.reveal i <= j && FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv) - 1 &&
j < MerkleTree.Low.uint32_32_max } ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
acc: MerkleTree.Low.Datastructures.hash ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 73,
"end_line": 922,
"start_col": 56,
"start_line": 734
} |
FStar.HyperStack.ST.ST | val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd))))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then begin
hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert(Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
end
else begin
if k = j then ()
else if k + 1ul = j
then (if actd
then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr
(B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
end | val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd)))))
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd = | true | null | false | let pth = !*p in
let hh0 = HST.get () in
let ofs = offset_of i in
if k % 2ul = 1ul
then
(hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
assert (HH.includes mtr (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
assert (Path?.hash_size pth = hsz);
mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs)))
else
if k = j
then ()
else
if k + 1ul = j
then
(if actd
then
(assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
mt_path_insert mtr p (V.index rhs lv)))
else
(hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
assert (HH.includes mtr (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.index_t",
"Prims.l_and",
"Prims.op_disEquality",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"FStar.Integers.op_Less",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"FStar.UInt32.v",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"Prims.op_AmpAmp",
"MerkleTree.Low.path_p",
"Prims.bool",
"FStar.Integers.op_Percent",
"MerkleTree.Low.mt_path_insert",
"Prims.unit",
"LowStar.Vector.index",
"LowStar.Vector.vector",
"Prims._assert",
"MerkleTree.Low.__proj__Path__item__hash_size",
"FStar.Monotonic.HyperHeap.includes",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"LowStar.Vector.get",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_includes",
"Prims.l_or",
"FStar.Integers.op_Plus",
"MerkleTree.Low.offset_of",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
// 64-bit base offset; `i`/`j` below are 32-bit indices relative to it
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
// ghost specification of the hash function, used only for verification
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
// A tree is always manipulated through a heap pointer (const for read-only APIs).
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
// Runtime (boolean) counterpart of the structural invariants baked into the
// `MT` constructor refinements; lets callers validate raw field values before
// constructing a tree. `rhs_ok` and `mroot` carry no structural constraint.
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
// Heap-level wrapper: reads the tree struct pointed to by `mt` in memory `h`.
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
// First retained slot at a level: `i` rounded down to the nearest even index.
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
// At each level the vector holds exactly `j - offset_of i` live hashes; the
// recursion halves `i`/`j`, mirroring how parent levels cover the leaves.
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
// Introduction rule for `mt_safe_elts`: the level-`lv` size fact together
// with safety of all tail levels yields safety starting at `lv`.
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
// Elimination rule: project out the head (level-`lv` size) fact.
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
// Elimination rule: project out safety of the tail levels.
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
// An all-empty `hs` (every level vector has size 0) is safe for range [0, 0).
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
// Framing lemma: `mt_safe_elts` survives any modification disjoint from the
// hash vectors. Registered as an SMT pattern so F* applies it automatically.
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
// Proof by induction over levels, framing each level vector individually.
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality: `hs`, `rhs` and `mroot` live in pairwise-disjoint
// sub-regions of the tree's own region.
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
// Framing: `mt_safe` (and the struct value itself) is preserved by any
// modification disjoint from `mt_loc mt`. The asserts remind Z3 that each
// component location is included in `mt_loc mt`.
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
// The low-level `mt_safe_elts` predicate implies the high-level
// well-formedness predicate `MTH.hs_wf_elts` on the lifted hash sequences.
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
// Ghost lift of a low-level tree value to the high-level specification tree
// (`MTH.merkle_tree`), reading all hashes from memory `h`. The precondition
// ensures every component can be lifted and the result is well-formed.
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
// Pointer-level wrapper: lift the struct currently stored at `mt`.
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
// The lifted (high-level) view of the tree is unchanged by any modification
// disjoint from `mt_loc mt`: both the struct and all reachable hashes frame.
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
// Each component gets its own fresh sub-region of `r`, which makes the
// pairwise-disjointness part of `mt_safe` hold by construction.
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
// Frame `hs` facts across the `rhs` allocation.
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
// Allocate the struct itself in `r`; the tree starts empty (i = j = 0).
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
// Frees the two hash stores, the root hash, and finally the struct itself.
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
/// Insertion
// Sequence algebra: updating index `i` of the lifted view equals splicing
// [0, i) ++ [v] ++ [i+1, |rv|). Proved via slice/upd lemmas on both sides.
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
// The implementation is in two phases: (1) build the extended level vector
// off to the side, then (2) assign it back into `hs[lv]`; most of the body
// is framing/disjointness reasoning for the proof.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
// For an even `j`, inserting leaves the parent range unchanged: (j+1)/2 = j/2.
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
// For an odd `j`, the parent range grows by one and the current level is
// guaranteed non-empty (j - offset_of i > 0).
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
// Four-way reassociation of `loc_union`: (a∪b)∪(c∪d) == (a∪c)∪(b∪d).
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
// Location algebra used by `insert_`: the union of "what level `lv` touches"
// and "what the recursive call touches" (each also unioned with `aloc`, the
// accumulator's region) collapses to the whole range [lv, size_of hs).
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Split both the vector footprint and the element footprint at lv / lv+1.
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
// Weakening: a `modifies l1` footprint may be enlarged with extra locations.
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
// After a snoc, the element at the old last index is the old last element.
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
// `rv_inv` already implies the per-range regionality fact `rv_elems_reg`.
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
// `acc` is used in place: it starts as the new leaf hash and is overwritten
// with the compressed parent hash before each recursive call.
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
// Odd `j`: the new element completes a pair, so compress and recurse upward.
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
// Even `j`: no pair is completed, so this level's push is the whole update.
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
// Insertion is allowed when the tree is not full and `offset + (j + 1)` still
// fits in 64 bits. `v` is not inspected; it only ties the hash size to `mtv`.
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
// Push `v` through all levels; `v` serves as the in-place accumulator.
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Frame `rhs` and `mroot` across the insertion into `hs` (and `v`'s region).
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
// Bump `j` and invalidate the cached rightmost hashes / root.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// Frame everything across the struct write itself.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
inline_for_extraction
val mt_path_insert:
#hsz:hash_size_t ->
mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
path_safe h0 mtr p /\
not (V.is_full (phashes h0 p)) /\
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.disjoint mtr (B.frameOf p) /\
HH.includes mtr (B.frameOf hp) /\
Path?.hash_size (B.get h0 p 0) = hsz))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
// correctness
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
(let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
hsz = hsz0 /\ hsz = hsz1 /\
(let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
let pth = !*p in
let pv = Path?.hashes pth in
let hh0 = HST.get () in
let ipv = V.insert pv hp in
let hh1 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
path_preserved_
mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
Rgl?.r_sep (hreg hsz) hp
(B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
p *= Path hsz ipv;
let hh2 = HST.get () in
path_safe_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
path_preserved_
mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
Rgl?.r_sep (hreg hsz) hp
(B.loc_region_only false (B.frameOf p)) hh1 hh2;
assert (S.equal (lift_path hh2 mtr p)
(lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
0 (S.length (V.as_seq hh1 ipv))));
lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
k:index_t ->
j:index_t{k <= j} ->
actd:bool ->
Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
let mt_path_length_step k j actd =
if j = 0ul then 0ul
else (if k % 2ul = 0ul
then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
else 1ul)
private inline_for_extraction
val mt_path_length:
lv:uint32_t{lv <= merkle_tree_size_lg} ->
k:index_t ->
j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
actd:bool ->
Tot (l:uint32_t{
U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
l <= 32ul - lv})
(decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
if j = 0ul then 0ul
else (let nactd = actd || (j % 2ul = 1ul) in
mt_path_length_step k j actd +
mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
val mt_get_path_length:
mtr:HH.rid ->
p:const_path_p ->
HST.ST uint32_t
(requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
(ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
let pd = !*(CB.cast p) in
V.size_of (Path?.hashes pd)
private inline_for_extraction
val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd))))) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 2,
"max_fuel": 1,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_make_path_step:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
mtr:HH.rid ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
k:index_t{i <= k && k <= j} ->
p:path_p ->
actd:bool ->
HST.ST unit
(requires (fun h0 ->
HH.includes mtr (V.frameOf hs) /\
HH.includes mtr (V.frameOf rhs) /\
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
mt_safe_elts h0 lv hs i j /\
path_safe h0 mtr p /\
Path?.hash_size (B.get h0 p 0) = hsz /\
V.size_of (phashes h0 p) <= lv + 1ul))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (path_loc p) h0 h1 /\
path_safe h1 mtr p /\
V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
V.size_of (phashes h1 p) <= lv + 2ul /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
(let hsz0 = Path?.hash_size (B.get h0 p 0) in
let hsz1 = Path?.hash_size (B.get h1 p 0) in
let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
hsz = hsz0 /\ hsz = hsz1 /\
S.equal after
(MTH.mt_make_path_step
(U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
(U32.v i) (U32.v j) (U32.v k) before actd))))) | [] | MerkleTree.Low.mt_make_path_step | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
mtr: FStar.Monotonic.HyperHeap.rid ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
rhs:
MerkleTree.Low.Datastructures.hash_vec
{LowStar.Vector.size_of rhs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j:
MerkleTree.Low.index_t
{j <> 0ul /\ i <= j /\ FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv)} ->
k: MerkleTree.Low.index_t{i <= k && k <= j} ->
p: MerkleTree.Low.path_p ->
actd: Prims.bool
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 5,
"end_line": 1849,
"start_col": 55,
"start_line": 1827
} |
FStar.HyperStack.ST.ST | val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt)))) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mt_verify #_ #hash_spec mt k j mtr p rt =
let ncmt = CB.cast mt in
let ncp = CB.cast p in
let mtv = !*ncmt in
let hash_size = MT?.hash_size mtv in
let hrg = hreg hash_size in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
let hh0 = HST.get () in
let nrid = HST.new_region (B.frameOf rt) in
let ih = rg_alloc hrg nrid in
let pth = !*ncp in
assert (MT?.hash_size mtv = hash_size);
assert (Path?.hash_size pth = hash_size);
let first = V.index (Path?.hashes pth) 0ul in
Cpy?.copy (hcpy hash_size) hash_size first ih;
let hh1 = HST.get () in
path_safe_preserved
mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
path_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
lift_path_index hh0 mtr ncp 0ul;
assert (Rgl?.r_repr hrg hh1 ih == S.index (lift_path #hash_size hh0 mtr ncp) 0);
mt_verify_ #hash_size #hash_spec k j mtr p 1ul ih false (MT?.hash_fun mtv);
let hh2 = HST.get () in
assert (Rgl?.r_repr hrg hh2 ih ==
MTH.mt_verify_ #(U32.v hash_size) #hash_spec (U32.v k) (U32.v j) (lift_path hh1 mtr ncp)
1 (Rgl?.r_repr hrg hh1 ih) false);
let r = Lib.ByteBuffer.lbytes_eq #hash_size ih rt in
rg_free hrg ih;
r | val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt))))
let mt_verify #_ #hash_spec mt k j mtr p rt = | true | null | false | let ncmt = CB.cast mt in
let ncp = CB.cast p in
let mtv = !*ncmt in
let hash_size = MT?.hash_size mtv in
let hrg = hreg hash_size in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
let hh0 = HST.get () in
let nrid = HST.new_region (B.frameOf rt) in
let ih = rg_alloc hrg nrid in
let pth = !*ncp in
assert (MT?.hash_size mtv = hash_size);
assert (Path?.hash_size pth = hash_size);
let first = V.index (Path?.hashes pth) 0ul in
Cpy?.copy (hcpy hash_size) hash_size first ih;
let hh1 = HST.get () in
path_safe_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
path_preserved mtr ncp (B.loc_all_regions_from false (B.frameOf rt)) hh0 hh1;
lift_path_index hh0 mtr ncp 0ul;
assert (Rgl?.r_repr hrg hh1 ih == S.index (lift_path #hash_size hh0 mtr ncp) 0);
mt_verify_ #hash_size #hash_spec k j mtr p 1ul ih false (MT?.hash_fun mtv);
let hh2 = HST.get () in
assert (Rgl?.r_repr hrg hh2 ih ==
MTH.mt_verify_ #(U32.v hash_size)
#hash_spec
(U32.v k)
(U32.v j)
(lift_path hh1 mtr ncp)
1
(Rgl?.r_repr hrg hh1 ih)
false);
let r = Lib.ByteBuffer.lbytes_eq #hash_size ih rt in
rg_free hrg ih;
r | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [] | [
"FStar.Ghost.erased",
"MerkleTree.Low.Datastructures.hash_size_t",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"FStar.Ghost.reveal",
"MerkleTree.Low.const_mt_p",
"EverCrypt.Helpers.uint64_t",
"FStar.Monotonic.HyperHeap.rid",
"MerkleTree.Low.const_path_p",
"MerkleTree.Low.Datastructures.hash",
"Prims.bool",
"Prims.unit",
"LowStar.Regional.rg_free",
"Lib.ByteBuffer.lbytes_eq",
"Prims._assert",
"Prims.eq2",
"MerkleTree.New.High.hash",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"MerkleTree.New.High.mt_verify_",
"MerkleTree.Low.lift_path",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.Low.mt_verify_",
"FStar.UInt32.__uint_to_t",
"MerkleTree.Low.__proj__MT__item__hash_fun",
"FStar.Seq.Base.index",
"MerkleTree.Low.lift_path_index",
"MerkleTree.Low.path_preserved",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Monotonic.Buffer.frameOf",
"Lib.IntTypes.uint8",
"LowStar.Buffer.trivial_preorder",
"MerkleTree.Low.path_safe_preserved",
"LowStar.RVector.__proj__Cpy__item__copy",
"MerkleTree.Low.Datastructures.hreg",
"MerkleTree.Low.Datastructures.hcpy",
"MerkleTree.Low.__proj__Path__item__hash_size",
"LowStar.Vector.index",
"MerkleTree.Low.__proj__Path__item__hashes",
"Prims.b2t",
"Prims.op_Equality",
"MerkleTree.Low.__proj__MT__item__hash_size",
"MerkleTree.Low.path",
"LowStar.BufferOps.op_Bang_Star",
"LowStar.ConstBuffer.qbuf_pre",
"LowStar.ConstBuffer.as_qbuf",
"LowStar.Regional.rg_alloc",
"FStar.HyperStack.ST.new_region",
"MerkleTree.Low.index_t",
"MerkleTree.Low.split_offset",
"MerkleTree.Low.__proj__MT__item__offset",
"LowStar.Regional.regional",
"MerkleTree.Low.merkle_tree",
"LowStar.Monotonic.Buffer.mbuffer",
"LowStar.ConstBuffer.cast"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
// Collapses the modifies footprint of `insert_`'s recursive case.
// The recursion touches: level `lv`'s element region + level `lv`'s vector slot
// + `aloc` (the accumulator's regions), unioned with the same footprint for
// levels `lv+1 ..`. This lemma shows that union equals the single footprint
// stated in `insert_`'s postcondition: all element regions from `lv`, the
// vector slots from `lv`, and `aloc`.
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
// Split the vector-slot footprint at lv+1 ...
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
// ... and split the element-region footprint at lv+1.
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
// Final regrouping into the postcondition's shape.
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
// Weakening: modifying `l1` implies modifying any union that contains `l1`.
// Used in `insert_`'s even (non-recursive) case to restate its modifies clause
// in the same shape as the recursive case.
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
// (l1 ∪ l2) includes l1, and ((l1 ∪ l2) ∪ l3) includes (l1 ∪ l2).
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
// After appending `v`, the element at the old last position is still the old
// last element of `s` (i.e. `snoc` does not disturb existing indices).
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
// The regional-vector invariant implies the per-element region facts for any
// sub-range [i, j); stated so callers can instantiate it at specific ranges.
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
// Low-level insertion at level `lv` and above.
// `i`/`j` bracket the live hashes at level `lv`; `acc` carries the hash being
// inserted (and is reused as the compression accumulator on the way up).
// Postcondition ties the result to the high-level spec `MTH.insert_`.
// NOTE(review): `acc` is mutated in place by `hash_fun` in the odd case.
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
// 1) Append a copy of `acc` to level `lv`.
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
// 2) Odd `j`: a pair at level `lv` is complete, so compress it into `acc`
//    and recurse one level up; even `j`: the insertion is done at this level.
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
// Non-stateful insertion precondition: the tree is not full and the global
// index (`offset + j + 1`) still fits in 64 bits.
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
// Stateful wrapper around `mt_insert_pre_nst`: dereferences the (const)
// tree pointer and checks the insertion precondition.
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
// Trivial assertion that helps the checker relate v's implicit hash size.
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Inserts hash `v` into the tree. `v` is consumed as the accumulator of
// `insert_`, so its contents are clobbered. Postcondition: the result
// refines the high-level `MTH.mt_insert`, and `rhs_ok` is reset to false.
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
// Do the actual insertion starting at level 0.
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
// Show `rhs` and `mroot` were untouched by `insert_` (disjoint footprints).
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
// Bump `j` and invalidate the cached rightmost hashes.
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
// Writing the record only touches the tree pointer; everything else survives.
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
// Creates a Merkle tree in region `r` with a custom hash function and inserts
// the mandatory first element `init` (which is clobbered, as in `mt_insert`).
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
// An authentication path: a size-tagged vector of hash pointers. Each hash
// points into the source Merkle tree (see `path_safe`), so the path does not
// own its hashes and regionality cannot be used here.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
// Mutable and read-only pointers to a path.
type path_p = B.pointer path
type const_path_p = const_pointer path
private
// Ghost accessor: the hash vector stored in the path pointed to by `p` in `h`.
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
// Memory-safety invariant for a path: the pointer and vector are live and
// freeable, every element is a valid hash whose region is inside the Merkle
// tree's region `mtr`, the vector's region extends the path pointer's frame,
// and the tree and path live in disjoint regions.
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
// Footprint of a path: every region under the path pointer's frame.
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
// Ghost lifting of a slice [i, j) of low-level hash pointers to the
// high-level path representation (a sequence of hash values).
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
// Builds the sequence back-to-front so each element keeps its index.
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
// Representation of a path: lifts the entire hash vector of `p` to a
// high-level path.
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
// Index correspondence: the lifted path's element at `k - i` is exactly the
// ghost value of the low-level hash at index `k`. Registered as an SMT
// pattern so it fires automatically when the lifted index appears in goals.
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Induction on j, mirroring the back-to-front construction of `lift_path_`.
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
// Whole-path specialization of `lift_path_index_`: element `i` of the lifted
// path equals the ghost value of the i-th hash in the path's vector.
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
// Lifting is extensional: two hash-pointer sequences that agree on [i, j)
// lift to equal high-level paths on that range.
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
// Pointwise argument: restate element equality in several index framings so
// the SMTPat of `lift_path_index_` connects both lifted sequences.
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
// Element-wise preservation: modifying a location disjoint from the tree's
// regions keeps every path hash in [i, j) valid and still inside `mtr`.
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
// Peel off the last element; its region is under mtr, hence disjoint from dl.
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
// `path_safe` survives any modification disjoint from both the path's
// footprint and the tree's regions.
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
// The path's footprint covers both the pointer and its hash vector.
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
// Special case for an empty path: safety (and emptiness) is preserved by any
// modification disjoint from the path's footprint — no per-element argument
// is needed since there are no hashes to preserve.
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
// Element-wise value preservation: under the same disjointness hypotheses as
// `path_safe_preserved_`, the lifted representation of the slice [i, j) is
// unchanged between h0 and h1.
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
// Same induction as `path_safe_preserved_`, also invoking `r_sep` to keep
// the last element's ghost value stable.
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
// Whole-path value preservation: a modification disjoint from the path and
// the tree keeps both the path's hash size and its lifted representation.
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
// Allocates a fresh, empty path in region `r` (disjoint from the tree's
// region `mtr`); its hash vector lives in a new subregion of `r`.
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
// Empties a path in place: the underlying vector is cleared (size set to 0)
// while the path pointer and hash size are retained.
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
// Deallocates a path: frees the hash vector, then the path pointer itself.
// Note: the hashes pointed to are owned by the Merkle tree and are not freed.
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
// Builds the "rightmost hashes" vector `rhs` for an incomplete tree, starting
// at level `lv`, while folding the Merkle root into `acc`. `actd` records
// whether `acc` already holds a live partial hash from a lower level.
// Postcondition ties the result to the high-level `MTH.construct_rhs`.
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
#push-options "--z3rlimit 250 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
// Base case: no elements at this level; nothing to do.
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
// Even `j`: no dangling rightmost node at this level; recurse directly.
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
// Odd `j`: the last node at this level is rightmost; either fold it into
// the live accumulator (actd) or start the accumulator from it.
else begin
if actd
then begin
// Save the current accumulator into rhs[lv], then compress the
// rightmost node with it.
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
// Accumulator not yet active: copy the rightmost node into `acc`.
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
// Summary of the state after the odd-case branch, before recursing.
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
// Recurse with the accumulator now active.
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end
#pop-options
private inline_for_extraction
// Non-stateful precondition for `mt_get_root`; currently always satisfiable.
val mt_get_root_pre_nst: mtv:merkle_tree -> rt:hash #(MT?.hash_size mtv) -> Tot bool
let mt_get_root_pre_nst mtv rt = true
// Stateful wrapper around `mt_get_root_pre_nst`: dereferences the (const)
// tree pointer and evaluates the root-retrieval precondition.
val mt_get_root_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
MT?.hash_size (B.get h0 mt 0) = Ghost.reveal hsz /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_get_root_pre #hsz mt rt =
let mt = CB.cast mt in
let mt = !*mt in
let hsz = MT?.hash_size mt in
// Trivial assertion relating the runtime hash size to rt's implicit size.
assert (MT?.hash_size mt = hsz);
mt_get_root_pre_nst mt rt
// `mt_get_root` returns the Merkle root. If it's already calculated with
// up-to-date hashes, the root is returned immediately. Otherwise it calls
// `construct_rhs` to build rightmost hashes and to calculate the Merkle root
// as well.
// Returns the Merkle root in `rt`. If the cached rightmost hashes are
// up to date (`rhs_ok`), the stored root is copied out; otherwise
// `construct_rhs` recomputes `rhs` and the root, and `rhs_ok` becomes true.
// Postcondition refines the high-level `MTH.mt_get_root`.
val mt_get_root:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
rt:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let mt = CB.cast mt in
let dmt = B.get h0 mt 0 in
MT?.hash_size dmt = (Ghost.reveal hsz) /\
mt_get_root_pre_nst dmt rt /\
mt_safe h0 mt /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HH.disjoint (B.frameOf mt) (B.frameOf rt)))
(ensures (fun h0 _ h1 ->
let mt = CB.cast mt in
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf rt)))
h0 h1 /\
mt_safe h1 mt /\
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = (Ghost.reveal hsz) /\
MT?.hash_size mtv1 = (Ghost.reveal hsz) /\
MT?.i mtv1 = MT?.i mtv0 /\ MT?.j mtv1 = MT?.j mtv0 /\
MT?.hs mtv1 == MT?.hs mtv0 /\ MT?.rhs mtv1 == MT?.rhs mtv0 /\
MT?.offset mtv1 == MT?.offset mtv0 /\
MT?.rhs_ok mtv1 = true /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
MTH.mt_get_root (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 rt) ==
(mt_lift h1 mt, Rgl?.r_repr (hreg hsz) h1 rt))))
#push-options "--z3rlimit 150 --initial_fuel 1 --max_fuel 1"
let mt_get_root #hsz mt rt =
let mt = CB.cast mt in
let hh0 = HST.get () in
let mtv = !*mt in
let prefix = MT?.offset mtv in
let i = MT?.i mtv in
let j = MT?.j mtv in
let hs = MT?.hs mtv in
let rhs = MT?.rhs mtv in
let mroot = MT?.mroot mtv in
let hash_size = MT?.hash_size mtv in
let hash_spec = MT?.hash_spec mtv in
let hash_fun = MT?.hash_fun mtv in
if MT?.rhs_ok mtv
then begin
Cpy?.copy (hcpy hash_size) hash_size mroot rt;
let hh1 = HST.get () in
mt_safe_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
mt_preserved mt
(B.loc_all_regions_from false (Rgl?.region_of (hreg hsz) rt)) hh0 hh1;
MTH.mt_get_root_rhs_ok_true
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh1 mt, Rgl?.r_repr (hreg hsz) hh1 rt))
end
else begin
construct_rhs #hash_size #hash_spec 0ul hs rhs i j rt false hash_fun;
let hh1 = HST.get () in
// memory safety
assert (RV.rv_inv hh1 rhs);
assert (Rgl?.r_inv (hreg hsz) hh1 rt);
assert (B.live hh1 mt);
RV.rv_inv_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
RV.as_seq_preserved
hs (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved 0ul hs i j
(loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf rt)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 0ul hs i j;
assert (MTH.construct_rhs #(U32.v hash_size) #hash_spec 0
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 rt) false ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 rt));
Cpy?.copy (hcpy hash_size) hash_size rt mroot;
let hh2 = HST.get () in
// memory safety
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
B.modifies_buffer_elim
rt (B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
mt_safe_elts_preserved 0ul hs i j
(B.loc_all_regions_from false (B.frameOf mroot))
hh1 hh2;
// correctness
assert (Rgl?.r_repr (hreg hsz) hh2 mroot == Rgl?.r_repr (hreg hsz) hh1 rt);
mt *= MT hash_size prefix i j hs true rhs mroot hash_spec hash_fun;
let hh3 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) rt (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.rv_inv_preserved rhs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved hs (B.loc_buffer mt) hh2 hh3;
RV.as_seq_preserved rhs (B.loc_buffer mt) hh2 hh3;
Rgl?.r_sep (hreg hsz) mroot (B.loc_buffer mt) hh2 hh3;
mt_safe_elts_preserved 0ul hs i j
(B.loc_buffer mt) hh2 hh3;
assert (mt_safe hh3 mt);
// correctness
MTH.mt_get_root_rhs_ok_false
(mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt);
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(MTH.MT #(U32.v hash_size)
(U32.v i) (U32.v j)
(RV.as_seq hh0 hs)
true
(RV.as_seq hh1 rhs)
(Rgl?.r_repr (hreg hsz) hh1 rt)
hash_spec,
Rgl?.r_repr (hreg hsz) hh1 rt));
assert (MTH.mt_get_root (mt_lift hh0 mt) (Rgl?.r_repr (hreg hsz) hh0 rt) ==
(mt_lift hh3 mt, Rgl?.r_repr (hreg hsz) hh3 rt))
end
#pop-options
// `mt_path_insert` appends one hash pointer `hp` (owned by the tree region
// `mtr`) to the path `p`. Note: only the pointer is stored; the path aliases
// the tree's hashes.
inline_for_extraction
val mt_path_insert:
  #hsz:hash_size_t ->
  mtr:HH.rid -> p:path_p -> hp:hash #hsz ->
  HST.ST unit
    (requires (fun h0 ->
      path_safe h0 mtr p /\
      not (V.is_full (phashes h0 p)) /\
      Rgl?.r_inv (hreg hsz) h0 hp /\
      HH.disjoint mtr (B.frameOf p) /\
      HH.includes mtr (B.frameOf hp) /\
      Path?.hash_size (B.get h0 p 0) = hsz))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      // correctness
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       (let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
        let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
        V.size_of (phashes h1 p) = V.size_of (phashes h0 p) + 1ul /\
        hsz = hsz0 /\ hsz = hsz1 /\
        (let hspec:(S.seq (MTH.hash #(U32.v hsz))) = (MTH.path_insert #(U32.v hsz) before (Rgl?.r_repr (hreg hsz) h0 hp)) in
         S.equal hspec after)))))
#push-options "--z3rlimit 20 --initial_fuel 1 --max_fuel 1"
let mt_path_insert #hsz mtr p hp =
  let pth = !*p in
  let pv = Path?.hashes pth in
  let hh0 = HST.get () in
  // `V.insert` may reallocate the underlying vector; `ipv` is the new one.
  let ipv = V.insert pv hp in
  let hh1 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  path_preserved_
    mtr (V.as_seq hh0 pv) 0 (S.length (V.as_seq hh0 pv))
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_all_regions_from false (V.frameOf ipv)) hh0 hh1;
  // Store the (possibly reallocated) vector back into the path record.
  p *= Path hsz ipv;
  let hh2 = HST.get () in
  path_safe_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  path_preserved_
    mtr (V.as_seq hh1 ipv) 0 (S.length (V.as_seq hh1 ipv))
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  Rgl?.r_sep (hreg hsz) hp
    (B.loc_region_only false (B.frameOf p)) hh1 hh2;
  assert (S.equal (lift_path hh2 mtr p)
                  (lift_path_ hh1 (S.snoc (V.as_seq hh0 pv) hp)
                    0 (S.length (V.as_seq hh1 ipv))));
  lift_path_eq hh1 (S.snoc (V.as_seq hh0 pv) hp) (V.as_seq hh0 pv)
    0 (S.length (V.as_seq hh0 pv))
#pop-options
// For given a target index `k`, the number of elements (in the tree) `j`,
// and a boolean flag (to check the existence of rightmost hashes), we can
// calculate a required Merkle path length.
//
// `mt_path_length` is a postcondition of `mt_get_path`, and a precondition
// of `mt_verify`. For detailed description, see `mt_get_path` and `mt_verify`.
private
val mt_path_length_step:
  k:index_t ->
  j:index_t{k <= j} ->
  actd:bool ->
  Tot (sl:uint32_t{U32.v sl = MTH.mt_path_length_step (U32.v k) (U32.v j) actd})
// Contribution (0 or 1) of one tree level to the path length; the refinement
// type ties it to the high-level spec `MTH.mt_path_length_step`.
let mt_path_length_step k j actd =
  if j = 0ul then 0ul
  else (if k % 2ul = 0ul
       then (if j = k || (j = k + 1ul && not actd) then 0ul else 1ul)
       else 1ul)
// Total Merkle path length for index `k` among `j` elements, starting at
// level `lv`; sums `mt_path_length_step` over all levels up to the root.
// The refinement proves agreement with `MTH.mt_path_length` and the bound
// `l <= 32ul - lv` needed by callers.
private inline_for_extraction
val mt_path_length:
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  k:index_t ->
  j:index_t{k <= j && U32.v j < pow2 (32 - U32.v lv)} ->
  actd:bool ->
  Tot (l:uint32_t{
    U32.v l = MTH.mt_path_length (U32.v k) (U32.v j) actd &&
    l <= 32ul - lv})
  (decreases (U32.v j))
#push-options "--z3rlimit 10 --initial_fuel 1 --max_fuel 1"
let rec mt_path_length lv k j actd =
  if j = 0ul then 0ul
  else (let nactd = actd || (j % 2ul = 1ul) in
       // Recurse one level up: indices halve, `actd` becomes sticky once set.
       mt_path_length_step k j actd +
       mt_path_length (lv + 1ul) (k / 2ul) (j / 2ul) nactd)
#pop-options
// Returns the number of hashes currently stored in the path `p`.
val mt_get_path_length:
  mtr:HH.rid ->
  p:const_path_p ->
  HST.ST uint32_t
    (requires (fun h0 -> path_safe h0 mtr (CB.cast p)))
    (ensures (fun h0 _ h1 -> True))
let mt_get_path_length mtr p =
  let pd = !*(CB.cast p) in
  V.size_of (Path?.hashes pd)
// One step of path construction at level `lv`: inserts into `p` the sibling
// hash of index `k` (from `hs`, or from `rhs` when the sibling is a rightmost
// hash and `actd` holds), or nothing when no sibling exists at this level.
private inline_for_extraction
val mt_make_path_step:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t ->
  j:index_t{j <> 0ul /\ i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) == V.size_of (phashes h0 p) + mt_path_length_step k j actd /\
      V.size_of (phashes h1 p) <= lv + 2ul /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_make_path_step
           (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 2 --max_ifuel 2"
let mt_make_path_step #hsz lv mtr hs rhs i j k p actd =
  let pth = !*p in
  let hh0 = HST.get () in
  let ofs = offset_of i in
  if k % 2ul = 1ul
  then begin
    // `k` is a right child: its sibling is the hash just to the left.
    hash_vv_rv_inv_includes hh0 hs lv (k - 1ul - ofs);
    assert (HH.includes mtr
             (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k - 1ul - ofs))));
    assert(Path?.hash_size pth = hsz);
    mt_path_insert #hsz mtr p (V.index (V.index hs lv) (k - 1ul - ofs))
  end
  else begin
    // `k` is a left child: the sibling (if any) is to the right, possibly a
    // rightmost hash stored in `rhs`.
    if k = j then ()
    else if k + 1ul = j
    then (if actd
         then (assert (HH.includes mtr (B.frameOf (V.get hh0 rhs lv)));
              mt_path_insert mtr p (V.index rhs lv)))
    else (hash_vv_rv_inv_includes hh0 hs lv (k + 1ul - ofs);
         assert (HH.includes mtr
                  (B.frameOf (V.get hh0 (V.get hh0 hs lv) (k + 1ul - ofs))));
         mt_path_insert mtr p (V.index (V.index hs lv) (k + 1ul - ofs)))
  end
#pop-options
// Heap-independent bounds check for reading the `i`-th hash of a path.
private inline_for_extraction
val mt_get_path_step_pre_nst:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:path ->
  i:uint32_t ->
  Tot bool
let mt_get_path_step_pre_nst #hsz mtr p i =
  i < V.size_of (Path?.hashes p)
// Stateful wrapper: dereferences the const path pointer and checks bounds.
val mt_get_path_step_pre:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST bool
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       mt_get_path_step_pre_nst #hsz mtr pv i)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_step_pre #hsz mtr p i =
  let p = CB.cast p in
  mt_get_path_step_pre_nst #hsz mtr !*p i
// Returns the `i`-th hash pointer stored in the path (no copy is made).
val mt_get_path_step:
  #hsz:Ghost.erased hash_size_t ->
  mtr:HH.rid ->
  p:const_path_p ->
  i:uint32_t ->
  HST.ST (hash #hsz)
    (requires (fun h0 ->
      path_safe h0 mtr (CB.cast p) /\
      (let pv = B.get h0 (CB.cast p) 0 in
       Path?.hash_size pv = Ghost.reveal hsz /\
       live h0 (Path?.hashes pv) /\
       i < V.size_of (Path?.hashes pv))))
    (ensures (fun h0 r h1 -> True ))
let mt_get_path_step #hsz mtr p i =
  let pd = !*(CB.cast p) in
  V.index #(hash #(Path?.hash_size pd)) (Path?.hashes pd) i
// Recursive worker for `mt_get_path`: walks from level `lv` up to the root,
// inserting one sibling hash per level (via `mt_make_path_step`) and halving
// the indices. Grows the path by exactly `mt_path_length lv k j actd` hashes.
private
val mt_get_path_:
  #hsz:hash_size_t ->
  lv:uint32_t{lv <= merkle_tree_size_lg} ->
  mtr:HH.rid ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
  i:index_t -> j:index_t{i <= j /\ U32.v j < pow2 (32 - U32.v lv)} ->
  k:index_t{i <= k && k <= j} ->
  p:path_p ->
  actd:bool ->
  HST.ST unit
    (requires (fun h0 ->
      HH.includes mtr (V.frameOf hs) /\
      HH.includes mtr (V.frameOf rhs) /\
      RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
      mt_safe_elts h0 lv hs i j /\
      path_safe h0 mtr p /\
      Path?.hash_size (B.get h0 p 0) = hsz /\
      V.size_of (phashes h0 p) <= lv + 1ul))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (path_loc p) h0 h1 /\
      path_safe h1 mtr p /\
      V.size_of (phashes h1 p) ==
      V.size_of (phashes h0 p) + mt_path_length lv k j actd /\
      // correctness
      (mt_safe_elts_spec h0 lv hs i j;
      (let hsz0 = Path?.hash_size (B.get h0 p 0) in
       let hsz1 = Path?.hash_size (B.get h1 p 0) in
       let before:(S.seq (MTH.hash #(U32.v hsz0))) = lift_path h0 mtr p in
       let after:(S.seq (MTH.hash #(U32.v hsz1))) = lift_path h1 mtr p in
       hsz = hsz0 /\ hsz = hsz1 /\
       S.equal after
         (MTH.mt_get_path_ (U32.v lv) (RV.as_seq h0 hs) (RV.as_seq h0 rhs)
           (U32.v i) (U32.v j) (U32.v k) before actd)))))
    (decreases (32 - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1 --max_ifuel 2 --initial_ifuel 2"
let rec mt_get_path_ #hsz lv mtr hs rhs i j k p actd =
  let hh0 = HST.get () in
  mt_safe_elts_spec hh0 lv hs i j;
  let ofs = offset_of i in
  if j = 0ul then ()
  else
    (mt_make_path_step lv mtr hs rhs i j k p actd;
    let hh1 = HST.get () in
    mt_safe_elts_spec hh0 lv hs i j;
    assert (S.equal (lift_path hh1 mtr p)
                    (MTH.mt_make_path_step
                      (U32.v lv) (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd));
    // The step only touched the path; re-establish invariants on `hs`/`rhs`.
    RV.rv_inv_preserved hs (path_loc p) hh0 hh1;
    RV.rv_inv_preserved rhs (path_loc p) hh0 hh1;
    RV.as_seq_preserved hs (path_loc p) hh0 hh1;
    RV.as_seq_preserved rhs (path_loc p) hh0 hh1;
    V.loc_vector_within_included hs lv (V.size_of hs);
    mt_safe_elts_preserved lv hs i j (path_loc p) hh0 hh1;
    assert (mt_safe_elts hh1 lv hs i j);
    mt_safe_elts_rec hh1 lv hs i j;
    mt_safe_elts_spec hh1 (lv + 1ul) hs (i / 2ul) (j / 2ul);
    // Recurse one level up; `actd` becomes sticky when `j` is odd.
    mt_get_path_ (lv + 1ul) mtr hs rhs (i / 2ul) (j / 2ul) (k / 2ul) p
      (if j % 2ul = 0ul then actd else true);
    let hh2 = HST.get () in
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv + 1)
                      (RV.as_seq hh1 hs) (RV.as_seq hh1 rhs)
                      (U32.v i / 2) (U32.v j / 2) (U32.v k / 2)
                      (lift_path hh1 mtr p)
                      (if U32.v j % 2 = 0 then actd else true)));
    assert (S.equal (lift_path hh2 mtr p)
                    (MTH.mt_get_path_ (U32.v lv)
                      (RV.as_seq hh0 hs) (RV.as_seq hh0 rhs)
                      (U32.v i) (U32.v j) (U32.v k)
                      (lift_path hh0 mtr p) actd)))
#pop-options
// Heap-independent precondition for `mt_get_path`: the offset must be
// representable, hash sizes must match, the target index must be within
// [i, j), and the output path must start empty.
private inline_for_extraction
val mt_get_path_pre_nst:
  mtv:merkle_tree ->
  idx:offset_t ->
  p:path ->
  root:(hash #(MT?.hash_size mtv)) ->
  Tot bool
let mt_get_path_pre_nst mtv idx p root =
  offsets_connect (MT?.offset mtv) idx &&
  Path?.hash_size p = MT?.hash_size mtv &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  MT?.i mtv <= idx && idx < MT?.j mtv &&
  V.size_of (Path?.hashes p) = 0ul)
// Stateful wrapper: dereference the const pointers and run the check above.
val mt_get_path_pre:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:const_path_p ->
  root:hash #hsz ->
  HST.ST bool
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let p = CB.cast p in
      let dmt = B.get h0 mt 0 in
      let dp = B.get h0 p 0 in
      MT?.hash_size dmt = (Ghost.reveal hsz) /\
      Path?.hash_size dp = (Ghost.reveal hsz) /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun _ _ _ -> True))
let mt_get_path_pre #_ mt idx p root =
  let mt = CB.cast mt in
  let p = CB.cast p in
  let mtv = !*mt in
  mt_get_path_pre_nst mtv idx !*p root
// Loc-algebra helper: absorbing `l2` into a union that already contains it.
val mt_get_path_loc_union_helper:
  l1:loc -> l2:loc ->
  Lemma (loc_union (loc_union l1 l2) l2 == loc_union l1 l2)
let mt_get_path_loc_union_helper l1 l2 = ()
// Construct a Merkle path for a given index `idx`, hashes `mt.hs`, and rightmost
// hashes `mt.rhs`. Note that this operation copies "pointers" into the Merkle tree
// to the output path.
#push-options "--z3rlimit 60"
val mt_get_path:
  #hsz:Ghost.erased hash_size_t ->
  mt:const_mt_p ->
  idx:offset_t ->
  p:path_p ->
  root:hash #hsz ->
  HST.ST index_t
    (requires (fun h0 ->
      let mt = CB.cast mt in
      let dmt = B.get h0 mt 0 in
      MT?.hash_size dmt = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      mt_get_path_pre_nst (B.get h0 mt 0) idx (B.get h0 p 0) root /\
      mt_safe h0 mt /\
      path_safe h0 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h0 root /\
      HH.disjoint (B.frameOf root) (B.frameOf mt) /\
      HH.disjoint (B.frameOf root) (B.frameOf p)))
    (ensures (fun h0 _ h1 ->
      let mt = CB.cast mt in
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      let idx = split_offset (MT?.offset mtv0) idx in
      MT?.hash_size mtv0 = Ghost.reveal hsz /\
      MT?.hash_size mtv1 = Ghost.reveal hsz /\
      Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
      Path?.hash_size (B.get h1 p 0) = Ghost.reveal hsz /\
      // memory safety
      modifies (loc_union
                 (loc_union
                   (mt_loc mt)
                   (B.loc_all_regions_from false (B.frameOf root)))
                 (path_loc p))
               h0 h1 /\
      mt_safe h1 mt /\
      path_safe h1 (B.frameOf mt) p /\
      Rgl?.r_inv (hreg hsz) h1 root /\
      V.size_of (phashes h1 p) ==
      1ul + mt_path_length 0ul idx (MT?.j mtv0) false /\
      // correctness
      (let sj, sp, srt =
         MTH.mt_get_path
           (mt_lift h0 mt) (U32.v idx) (Rgl?.r_repr (hreg hsz) h0 root) in
      sj == U32.v (MT?.j mtv1) /\
      S.equal sp (lift_path #hsz h1 (B.frameOf mt) p) /\
      srt == Rgl?.r_repr (hreg hsz) h1 root)))
#pop-options
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
let mt_get_path #hsz mt idx p root =
  let ncmt = CB.cast mt in
  let mtframe = B.frameOf ncmt in
  let hh0 = HST.get () in
  // First materialize the root (also refreshes `rhs` if stale).
  mt_get_root mt root;
  let mtv = !*ncmt in
  let hsz = MT?.hash_size mtv in
  let hh1 = HST.get () in
  path_safe_init_preserved mtframe p
    (B.loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    hh0 hh1;
  assert (MTH.mt_get_root (mt_lift hh0 ncmt) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (mt_lift hh1 ncmt, Rgl?.r_repr (hreg hsz) hh1 root));
  assert (S.equal (lift_path #hsz hh1 mtframe p) S.empty);
  let idx = split_offset (MT?.offset mtv) idx in
  let i = MT?.i mtv in
  let ofs = offset_of (MT?.i mtv) in
  let j = MT?.j mtv in
  let hs = MT?.hs mtv in
  let rhs = MT?.rhs mtv in
  assert (mt_safe_elts hh1 0ul hs i j);
  assert (V.size_of (V.get hh1 hs 0ul) == j - ofs);
  assert (idx < j);
  hash_vv_rv_inv_includes hh1 hs 0ul (idx - ofs);
  hash_vv_rv_inv_r_inv hh1 hs 0ul (idx - ofs);
  hash_vv_as_seq_get_index hh1 hs 0ul (idx - ofs);
  // The path starts with the leaf hash at `idx` itself.
  let ih = V.index (V.index hs 0ul) (idx - ofs) in
  mt_path_insert #hsz mtframe p ih;
  let hh2 = HST.get () in
  assert (S.equal (lift_path hh2 mtframe p)
                  (MTH.path_insert
                    (lift_path hh1 mtframe p)
                    (S.index (S.index (RV.as_seq hh1 hs) 0) (U32.v idx - U32.v ofs))));
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh1 hh2;
  mt_safe_preserved ncmt (path_loc p) hh1 hh2;
  mt_preserved ncmt (path_loc p) hh1 hh2;
  assert (V.size_of (phashes hh2 p) == 1ul);
  // Then collect the sibling hashes along the path to the root.
  mt_get_path_ 0ul mtframe hs rhs i j idx p false;
  let hh3 = HST.get () in
  // memory safety
  mt_get_path_loc_union_helper
    (loc_union (mt_loc ncmt)
      (B.loc_all_regions_from false (B.frameOf root)))
    (path_loc p);
  Rgl?.r_sep (hreg hsz) root (path_loc p) hh2 hh3;
  mt_safe_preserved ncmt (path_loc p) hh2 hh3;
  mt_preserved ncmt (path_loc p) hh2 hh3;
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  assert (S.length (lift_path #hsz hh3 mtframe p) ==
         S.length (lift_path #hsz hh2 mtframe p) +
         MTH.mt_path_length (U32.v idx) (U32.v (MT?.j (B.get hh0 ncmt 0))) false);
  assert (modifies (loc_union
                     (loc_union
                       (mt_loc ncmt)
                       (B.loc_all_regions_from false (B.frameOf root)))
                     (path_loc p))
                   hh0 hh3);
  assert (mt_safe hh3 ncmt);
  assert (path_safe hh3 mtframe p);
  assert (Rgl?.r_inv (hreg hsz) hh3 root);
  assert (V.size_of (phashes hh3 p) ==
         1ul + mt_path_length 0ul idx (MT?.j (B.get hh0 ncmt 0)) false);
  // correctness
  mt_safe_elts_spec hh2 0ul hs i j;
  assert (S.equal (lift_path hh3 mtframe p)
                  (MTH.mt_get_path_ 0 (RV.as_seq hh2 hs) (RV.as_seq hh2 rhs)
                    (U32.v i) (U32.v j) (U32.v idx)
                    (lift_path hh2 mtframe p) false));
  assert (MTH.mt_get_path
           (mt_lift hh0 ncmt) (U32.v idx) (Rgl?.r_repr (hreg hsz) hh0 root) ==
         (U32.v (MT?.j (B.get hh3 ncmt 0)),
         lift_path hh3 mtframe p,
         Rgl?.r_repr (hreg hsz) hh3 root));
  j
#pop-options
/// Flushing
// Loc-algebra lemma used by `mt_flush_to_`: the union of the level-`lv`
// footprint with the recursive footprint (levels `lv+1` and up) equals the
// whole footprint from level `lv`.
private val
mt_flush_to_modifies_rec_helper:
  #hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  h:HS.mem ->
  Lemma (loc_union
          (loc_union
            (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
            (V.loc_vector_within hs lv (lv + 1ul)))
          (loc_union
            (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
            (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) ==
        loc_union
          (RV.rv_loc_elems h hs lv (V.size_of hs))
          (V.loc_vector_within hs lv (V.size_of hs)))
#push-options "--initial_fuel 2 --max_fuel 2"
let mt_flush_to_modifies_rec_helper #hsz lv hs h =
  assert (V.loc_vector_within hs lv (V.size_of hs) ==
         loc_union (V.loc_vector_within hs lv (lv + 1ul))
                   (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
  RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
  assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
         loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
                   (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
  loc_union_assoc_4
    (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
    (V.loc_vector_within hs lv (lv + 1ul))
    (RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
    (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
// Recursive worker for `mt_flush_to`: at each level starting from `lv`,
// drops the hashes between `offset_of pi` and `offset_of i` in place, then
// recurses on the halved indices. Matches the high-level spec
// `MTH.mt_flush_to_`.
private
val mt_flush_to_:
  hsz:hash_size_t ->
  lv:uint32_t{lv < merkle_tree_size_lg} ->
  hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
  pi:index_t ->
  i:index_t{i >= pi} ->
  j:Ghost.erased index_t{
    Ghost.reveal j >= i &&
    U32.v (Ghost.reveal j) < pow2 (32 - U32.v lv)} ->
  HST.ST unit
    (requires (fun h0 ->
      RV.rv_inv h0 hs /\
      mt_safe_elts h0 lv hs pi (Ghost.reveal j)))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (loc_union
                 (RV.rv_loc_elems h0 hs lv (V.size_of hs))
                 (V.loc_vector_within hs lv (V.size_of hs)))
               h0 h1 /\
      RV.rv_inv h1 hs /\
      mt_safe_elts h1 lv hs i (Ghost.reveal j) /\
      // correctness
      (mt_safe_elts_spec h0 lv hs pi (Ghost.reveal j);
      S.equal (RV.as_seq h1 hs)
              (MTH.mt_flush_to_
                (U32.v lv) (RV.as_seq h0 hs) (U32.v pi)
                (U32.v i) (U32.v (Ghost.reveal j))))))
    (decreases (U32.v i))
#restart-solver
#push-options "--z3rlimit 1500 --fuel 1 --ifuel 0"
let rec mt_flush_to_ hsz lv hs pi i j =
  let hh0 = HST.get () in
  // Base conditions
  mt_safe_elts_rec hh0 lv hs pi (Ghost.reveal j);
  V.loc_vector_within_included hs 0ul lv;
  V.loc_vector_within_included hs lv (lv + 1ul);
  V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
  V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
  let oi = offset_of i in
  let opi = offset_of pi in
  // Nothing to flush at this level (and hence above) when offsets coincide.
  if oi = opi then mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j)
  else begin
    /// 1) Flush hashes at the level `lv`, where the new vector is
    /// not yet connected to `hs`.
    let ofs = oi - opi in
    let hvec = V.index hs lv in
    let flushed:(rvector (hreg hsz)) = rv_flush_inplace hvec ofs in
    let hh1 = HST.get () in
    // 1-0) Basic disjointness conditions for `RV.assign`
    V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
      (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                                (Rgl?.region_of (hvreg hsz) b2));
    V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
      (fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
                                (Rgl?.region_of (hvreg hsz) b2));
    V.forall_preserved
      hs 0ul lv
      (fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
                            (Rgl?.region_of (hvreg hsz) b))
      (RV.loc_rvector hvec)
      hh0 hh1;
    V.forall_preserved
      hs (lv + 1ul) (V.size_of hs)
      (fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
                            (Rgl?.region_of (hvreg hsz) b))
      (RV.loc_rvector hvec)
      hh0 hh1;
    assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) flushed);
    // 1-1) For the `modifies` postcondition.
    assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
    // 1-2) Preservation
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
    // 1-3) For `mt_safe_elts`
    assert (V.size_of flushed == Ghost.reveal j - offset_of i); // head updated
    mt_safe_elts_preserved
      (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
      (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
    // 1-4) For the `rv_inv` postcondition
    RV.rs_loc_elems_elem_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
    RV.rs_loc_elems_parent_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v lv);
    RV.rv_elems_inv_preserved
      hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
      hh0 hh1;
    assert (RV.rv_elems_inv hh1 hs 0ul lv);
    RV.rs_loc_elems_elem_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      0 (U32.v (V.size_of hs))
      (U32.v lv + 1) (U32.v (V.size_of hs))
      (U32.v lv);
    RV.rs_loc_elems_parent_disj
      (hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
      (U32.v lv + 1) (U32.v (V.size_of hs));
    RV.rv_elems_inv_preserved
      hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
      hh0 hh1;
    assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
    assert (rv_itself_inv hh1 hs);
    assert (elems_reg hh1 hs);
    // 1-5) Correctness
    assert (S.equal (RV.as_seq hh1 flushed)
                    (S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) (U32.v ofs)
                      (S.length (RV.as_seq hh0 (V.get hh0 hs lv)))));
    /// 2) Assign the flushed vector to `hs` at the level `lv`.
    RV.assign hs lv flushed;
    let hh2 = HST.get () in
    // 2-1) For the `modifies` postcondition.
    assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
    assert (modifies (loc_union
                       (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                       (V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
    // 2-2) Preservation
    V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
    RV.rv_loc_elems_preserved
      hs (lv + 1ul) (V.size_of hs)
      (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
    // 2-3) For `mt_safe_elts`
    assert (V.size_of (V.get hh2 hs lv) ==
           Ghost.reveal j - offset_of i);
    mt_safe_elts_preserved
      (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul)
      (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
    // 2-4) Correctness
    RV.as_seq_sub_preserved hs 0ul lv (loc_rvector flushed) hh0 hh1;
    RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector flushed) hh0 hh1;
    assert (S.equal (RV.as_seq hh2 hs)
                    (S.append
                      (RV.as_seq_sub hh0 hs 0ul lv)
                      (S.cons (RV.as_seq hh1 flushed)
                              (RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
    as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 flushed);
    // if `lv = 31` then `pi <= i <= j < 2` thus `oi = opi`,
    // contradicting the branch.
    assert (lv + 1ul < merkle_tree_size_lg);
    assert (U32.v (Ghost.reveal j / 2ul) < pow2 (32 - U32.v (lv + 1ul)));
    assert (RV.rv_inv hh2 hs);
    assert (mt_safe_elts hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul));
    /// 3) Recursion
    mt_flush_to_ hsz (lv + 1ul) hs (pi / 2ul) (i / 2ul)
      (Ghost.hide (Ghost.reveal j / 2ul));
    let hh3 = HST.get () in
    // 3-0) Memory safety brought from the postcondition of the recursion
    assert (modifies
             (loc_union
               (loc_union
                 (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
                 (V.loc_vector_within hs lv (lv + 1ul)))
               (loc_union
                 (RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
                 (V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
             hh0 hh3);
    mt_flush_to_modifies_rec_helper lv hs hh0;
    V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
    V.loc_vector_within_included hs lv (lv + 1ul);
    RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
    assert (loc_disjoint
             (V.loc_vector_within hs lv (lv + 1ul))
             (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
    V.get_preserved hs lv
      (loc_union
        (RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
        (V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
      hh2 hh3;
    assert (V.size_of (V.get hh3 hs lv) ==
           Ghost.reveal j - offset_of i);
    assert (RV.rv_inv hh3 hs);
    mt_safe_elts_constr hh3 lv hs i (Ghost.reveal j);
    assert (mt_safe_elts hh3 lv hs i (Ghost.reveal j));
    // 3-1) Correctness
    mt_safe_elts_spec hh2 (lv + 1ul) hs (pi / 2ul) (Ghost.reveal j / 2ul);
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.mt_flush_to_ (U32.v lv + 1) (RV.as_seq hh2 hs)
                      (U32.v pi / 2) (U32.v i / 2) (U32.v (Ghost.reveal j) / 2)));
    mt_safe_elts_spec hh0 lv hs pi (Ghost.reveal j);
    MTH.mt_flush_to_rec
      (U32.v lv) (RV.as_seq hh0 hs)
      (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j));
    assert (S.equal (RV.as_seq hh3 hs)
                    (MTH.mt_flush_to_ (U32.v lv) (RV.as_seq hh0 hs)
                      (U32.v pi) (U32.v i) (U32.v (Ghost.reveal j))))
  end
#pop-options
// `mt_flush_to` flushes old hashes in the Merkle tree. It removes hash elements
// from `MT?.i` to **`offset_of (idx - 1)`**, but maintains the tree structure,
// i.e., the tree still holds some old internal hashes (compressed from old
// hashes) which are required to generate Merkle paths for remaining hashes.
//
// Note that `mt_flush_to` (and `mt_flush`) always remain at least one base hash
// elements. If there are `MT?.j` number of elements in the tree, because of the
// precondition `MT?.i <= idx < MT?.j` we still have `idx`-th element after
// flushing.
// Heap-independent precondition: `idx` is representable against the tree's
// offset and lies in [MT?.i, MT?.j).
private inline_for_extraction
val mt_flush_to_pre_nst: mtv:merkle_tree -> idx:offset_t -> Tot bool
let mt_flush_to_pre_nst mtv idx =
  offsets_connect (MT?.offset mtv) idx &&
  ([@inline_let] let idx = split_offset (MT?.offset mtv) idx in
  idx >= MT?.i mtv &&
  idx < MT?.j mtv)
// Stateful wrapper: dereference the const tree pointer and check above.
val mt_flush_to_pre: mt:const_mt_p -> idx:offset_t -> HST.ST bool
  (requires (fun h0 -> mt_safe h0 (CB.cast mt)))
  (ensures (fun _ _ _ -> True))
let mt_flush_to_pre mt idx =
  let mt = CB.cast mt in
  let h0 = HST.get() in
  let mtv = !*mt in
  mt_flush_to_pre_nst mtv idx
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
// Flush the tree up to (absolute) index `idx`: drop old level-0 hashes via
// `mt_flush_to_`, then update `MT?.i` to the new lower bound.
val mt_flush_to:
  mt:mt_p ->
  idx:offset_t ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt /\ mt_flush_to_pre_nst (B.get h0 mt 0) idx))
    (ensures (fun h0 _ h1 ->
      // memory safety
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      (let mtv0 = B.get h0 mt 0 in
       let mtv1 = B.get h1 mt 0 in
       let off = MT?.offset mtv0 in
       let idx = split_offset off idx in
       MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
       MTH.mt_flush_to (mt_lift h0 mt) (U32.v idx) == mt_lift h1 mt)))
let mt_flush_to mt idx =
  let hh0 = HST.get () in
  let mtv = !*mt in
  let offset = MT?.offset mtv in
  let j = MT?.j mtv in
  let hsz = MT?.hash_size mtv in
  let idx = split_offset offset idx in
  let hs = MT?.hs mtv in
  mt_flush_to_ hsz 0ul hs (MT?.i mtv) idx (Ghost.hide (MT?.j mtv));
  let hh1 = HST.get () in
  // The flush only touched `hs`; re-establish invariants on `rhs`/`mroot`.
  RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
  V.loc_vector_within_included hs 0ul (V.size_of hs);
  RV.rv_inv_preserved
    (MT?.rhs mtv)
    (loc_union
      (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
      (V.loc_vector_within hs 0ul (V.size_of hs)))
    hh0 hh1;
  RV.as_seq_preserved
    (MT?.rhs mtv)
    (loc_union
      (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
      (V.loc_vector_within hs 0ul (V.size_of hs)))
    hh0 hh1;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
    (loc_union
      (RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
      (V.loc_vector_within hs 0ul (V.size_of hs)))
    hh0 hh1;
  // Record update: `i` becomes `idx`; everything else is carried over.
  mt *= MT (MT?.hash_size mtv)
           (MT?.offset mtv) idx (MT?.j mtv)
           hs
           (MT?.rhs_ok mtv) (MT?.rhs mtv)
           (MT?.mroot mtv)
           (MT?.hash_spec mtv) (MT?.hash_fun mtv);
  let hh2 = HST.get () in
  RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
  RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
  Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
  mt_safe_elts_preserved 0ul hs idx (MT?.j mtv) (B.loc_buffer mt) hh1 hh2
#pop-options
// `mt_flush` is flushable only when the tree holds at least one element.
private inline_for_extraction
val mt_flush_pre_nst: mt:merkle_tree -> Tot bool
let mt_flush_pre_nst mt = MT?.j mt > MT?.i mt
val mt_flush_pre: mt:const_mt_p -> HST.ST bool (requires (fun h0 -> mt_safe h0 (CB.cast mt))) (ensures (fun _ _ _ -> True))
let mt_flush_pre mt = mt_flush_pre_nst !*(CB.cast mt)
// Flush everything except the last element: delegates to `mt_flush_to` with
// the absolute index of element `j - 1`.
val mt_flush:
  mt:mt_p ->
  HST.ST unit
    (requires (fun h0 -> mt_safe h0 mt /\ mt_flush_pre_nst (B.get h0 mt 0)))
    (ensures (fun h0 _ h1 ->
      let mtv0 = B.get h0 mt 0 in
      let mtv1 = B.get h1 mt 0 in
      // memory safety
      modifies (mt_loc mt) h0 h1 /\
      mt_safe h1 mt /\
      // correctness
      MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
      MTH.mt_flush (mt_lift h0 mt) == mt_lift h1 mt))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let mt_flush mt =
  let mtv = !*mt in
  let off = MT?.offset mtv in
  let j = MT?.j mtv in
  let j1 = j - 1ul in
  // Overflow proof hints before joining the 64-bit offset with `j1`.
  assert (j1 < uint32_32_max);
  assert (off < uint64_max);
  assert (UInt.fits (U64.v off + U32.v j1) 64);
  let jo = join_offset off j1 in
  mt_flush_to mt jo
#pop-options
/// Retraction
private
val mt_retract_to_:
#hsz:hash_size_t ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
lv:uint32_t{lv < V.size_of hs} ->
i:index_t ->
s:index_t ->
j:index_t{i <= s && s <= j && v j < pow2 (U32.v (V.size_of hs) - v lv)}
-> HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
mt_safe_elts h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
(modifies (loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
h0 h1) /\
RV.rv_inv h1 hs /\
mt_safe_elts h1 lv hs i s /\
// correctness
(mt_safe_elts_spec h0 lv hs i j;
S.equal (RV.as_seq h1 hs)
(MTH.mt_retract_to_
(RV.as_seq h0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
))
(decreases (U32.v merkle_tree_size_lg - U32.v lv))
#push-options "--z3rlimit 300 --initial_fuel 1 --max_fuel 1"
private
let rec mt_retract_to_ #hsz hs lv i s j =
let hh0 = HST.get () in
// Base conditions
mt_safe_elts_rec hh0 lv hs i j;
V.loc_vector_within_included hs 0ul lv;
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
if lv >= V.size_of hs then ()
else begin
// 1) Retract hashes at level `lv`.
let hvec = V.index hs lv in
let old_len = j - offset_of i in
let new_len = s - offset_of i in
let retracted = RV.shrink hvec new_len in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions for `RV.assign`
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall_preserved
hs 0ul lv
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
V.forall_preserved
hs (lv + 1ul) (V.size_of hs)
(fun b -> HH.disjoint (Rgl?.region_of (hvreg hsz) hvec)
(Rgl?.region_of (hvreg hsz) b))
(RV.loc_rvector hvec)
hh0 hh1;
assert (Rgl?.region_of (hvreg hsz) hvec == Rgl?.region_of (hvreg hsz) retracted);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of retracted == new_len);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
assert (rv_itself_inv hh1 hs);
assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 retracted)
(S.slice (RV.as_seq hh0 (V.get hh0 hs lv)) 0 (U32.v new_len)));
RV.assign hs lv retracted;
let hh2 = HST.get() in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == s - offset_of i);
mt_safe_elts_preserved
(lv + 1ul) hs (i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector retracted) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector retracted) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 retracted)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 retracted);
if lv + 1ul < V.size_of hs then
begin
assert (mt_safe_elts hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul));
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
mt_retract_to_ hs (lv + 1ul) (i / 2ul) (s / 2ul) (j / 2ul);
// 3-0) Memory safety brought from the postcondition of the recursion
let hh3 = HST.get () in
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))))
hh0 hh3);
mt_flush_to_modifies_rec_helper lv hs hh0;
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
V.loc_vector_within_included hs lv (lv + 1ul);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
V.get_preserved hs lv
(loc_union
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) == s - offset_of i);
assert (RV.rv_inv hh3 hs);
mt_safe_elts_constr hh3 lv hs i s;
assert (mt_safe_elts hh3 lv hs i s);
// 3-1) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (U32.v lv + 1 < S.length (RV.as_seq hh3 hs) ==>
S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh2 hs) (U32.v lv + 1)
(U32.v i / 2) (U32.v s / 2) (U32.v j / 2)));
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts hh0 lv hs i j);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_ (RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
else begin
let hh3 = HST.get() in
assert ((modifies (loc_union
(RV.rv_loc_elems hh0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
hh0 hh3));
assert (RV.rv_inv hh3 hs /\ mt_safe_elts hh3 lv hs i s);
mt_safe_elts_spec hh0 lv hs i j;
assert (S.equal (RV.as_seq hh3 hs)
(MTH.mt_retract_to_
(RV.as_seq hh0 hs) (U32.v lv)
(U32.v i) (U32.v s) (U32.v j)))
end
end
#pop-options
private inline_for_extraction
val mt_retract_to_pre_nst: mtv:merkle_tree -> r:offset_t -> Tot bool
let mt_retract_to_pre_nst mtv r =
offsets_connect (MT?.offset mtv) r &&
([@inline_let] let r = split_offset (MT?.offset mtv) r in
MT?.i mtv <= r && r < MT?.j mtv)
val mt_retract_to_pre: mt:const_mt_p -> r:offset_t -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt)))
(ensures (fun _ _ _ -> True))
let mt_retract_to_pre mt r =
let mt = CB.cast mt in
let h0 = HST.get() in
let mtv = !*mt in
mt_retract_to_pre_nst mtv r
#push-options "--z3rlimit 100"
val mt_retract_to:
mt:mt_p ->
r:offset_t ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt /\ mt_retract_to_pre_nst (B.get h0 mt 0) r))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
// correctness
(let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
let off = MT?.offset mtv0 in
let r = split_offset off r in
MT?.hash_size mtv0 = MT?.hash_size mtv1 /\
MTH.mt_retract_to (mt_lift h0 mt) (U32.v r) == mt_lift h1 mt)))
let mt_retract_to mt r =
let hh0 = HST.get () in
let mtv = !*mt in
let offset = MT?.offset mtv in
let r = split_offset offset r in
let hs = MT?.hs mtv in
mt_retract_to_ hs 0ul (MT?.i mtv) (r + 1ul) (MT?.j mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 hs 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv)
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv) (MT?.offset mtv) (MT?.i mtv) (r+1ul) hs false (MT?.rhs mtv) (MT?.mroot mtv) (MT?.hash_spec mtv) (MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved (MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved 0ul hs (MT?.i mtv) (r+1ul) (B.loc_buffer mt) hh1 hh2
#pop-options
/// Client-side verification
private
val mt_verify_:
#hsz:hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
k:index_t ->
j:index_t{k <= j} ->
mtr:HH.rid ->
p:const_path_p ->
ppos:uint32_t ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
let p = CB.cast p in
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 acc /\
Path?.hash_size (B.get h0 p 0) = hsz /\
HH.disjoint (B.frameOf p) (B.frameOf acc) /\
HH.disjoint mtr (B.frameOf acc) /\
// Below is a very relaxed condition,
// but sufficient to ensure (+) for uint32_t is sound.
ppos <= 64ul - mt_path_length 0ul k j actd /\
ppos + mt_path_length 0ul k j actd <= V.size_of (phashes h0 p)))
(ensures (fun h0 _ h1 ->
let p = CB.cast p in
// memory safety
modifies (B.loc_all_regions_from false (B.frameOf acc)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
Rgl?.r_repr (hreg hsz) h1 acc ==
MTH.mt_verify_ #(U32.v hsz) #hash_spec (U32.v k) (U32.v j) (lift_path h0 mtr p)
(U32.v ppos) (Rgl?.r_repr (hreg hsz) h0 acc) actd))
#push-options "--z3rlimit 200 --initial_fuel 1 --max_fuel 1"
let rec mt_verify_ #hsz #hash_spec k j mtr p ppos acc actd hash_fun =
let ncp:path_p = CB.cast p in
let hh0 = HST.get () in
if j = 0ul then ()
else (let nactd = actd || (j % 2ul = 1ul) in
if k % 2ul = 0ul then begin
if j = k || (j = k + 1ul && not actd) then
mt_verify_ (k / 2ul) (j / 2ul) mtr p ppos acc nactd hash_fun
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun acc phash acc;
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (Rgl?.r_repr (hreg hsz) hh0 acc)
(S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos)));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end
end
else begin
let ncpd = !*ncp in
let phash = V.index (Path?.hashes ncpd) ppos in
hash_fun phash acc acc;
let hh1 = HST.get () in
path_preserved mtr ncp
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
lift_path_index hh0 mtr ncp ppos;
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
hash_spec (S.index (lift_path #hsz hh0 mtr ncp) (U32.v ppos))
(Rgl?.r_repr (hreg hsz) hh0 acc));
mt_verify_ (k / 2ul) (j / 2ul) mtr p (ppos + 1ul) acc nactd hash_fun
end)
#pop-options
private inline_for_extraction
val mt_verify_pre_nst: mt:merkle_tree -> k:offset_t -> j:offset_t -> p:path -> rt:(hash #(MT?.hash_size mt)) -> Tot bool
let mt_verify_pre_nst mt k j p rt =
k < j &&
offsets_connect (MT?.offset mt) k &&
offsets_connect (MT?.offset mt) j &&
MT?.hash_size mt = Path?.hash_size p &&
([@inline_let] let k = split_offset (MT?.offset mt) k in
[@inline_let] let j = split_offset (MT?.offset mt) j in
// We need to add one since the first element is the hash to verify.
V.size_of (Path?.hashes p) = 1ul + mt_path_length 0ul k j false)
val mt_verify_pre:
#hsz:Ghost.erased hash_size_t ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt)))
(ensures (fun _ _ _ -> True))
let mt_verify_pre #hsz mt k j mtr p rt =
let mt = CB.cast mt in
let p = CB.cast p in
let mtv = !*mt in
mt_verify_pre_nst mtv k j !*p rt
// `mt_verify` verifies a Merkle path `p` with given target index `k` and
// the number of elements `j`. It recursively iterates the path with an
// accumulator `acc` (a compressed hash).
//
// Note that `mt_path_length` is given as a precondition of this operation.
// This is a postcondition of `mt_get_path` so we can call `mt_verify` with
// every path generated by `mt_get_path`.
#push-options "--z3rlimit 20"
val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt))))
#pop-options | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 2,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 200,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mt_verify:
#hsz:Ghost.erased hash_size_t ->
#hash_spec:MTS.hash_fun_t #(U32.v hsz) ->
mt:const_mt_p ->
k:uint64_t ->
j:uint64_t ->
mtr:HH.rid ->
p:const_path_p ->
rt:hash #hsz ->
HST.ST bool
(requires (fun h0 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
Path?.hash_size (B.get h0 p 0) = Ghost.reveal hsz /\
Ghost.reveal (MT?.hash_spec mtv0) == hash_spec /\
mt_safe h0 mt /\
path_safe h0 mtr p /\ Rgl?.r_inv (hreg hsz) h0 rt /\
HST.is_eternal_region (B.frameOf rt) /\
HH.disjoint (B.frameOf p) (B.frameOf rt) /\
HH.disjoint mtr (B.frameOf rt) /\
mt_verify_pre_nst (B.get h0 mt 0) k j (B.get h0 p 0) rt))
(ensures (fun h0 b h1 ->
let mt = CB.cast mt in
let p = CB.cast p in
let mtv0 = B.get h0 mt 0 in
let mtv1 = B.get h1 mt 0 in
MT?.hash_size mtv0 = Ghost.reveal hsz /\
MT?.hash_size mtv1 = Ghost.reveal hsz /\
// memory safety:
// `rt` is not modified in this function, but we use a trick
// to allocate an auxiliary buffer in the extended region of `rt`.
modifies (B.loc_all_regions_from false (B.frameOf rt)) h0 h1 /\
Rgl?.r_inv (hreg hsz) h1 rt /\
// correctness
S.equal (Rgl?.r_repr (hreg hsz) h0 rt) (Rgl?.r_repr (hreg hsz) h1 rt) /\
(let mtv = B.get h0 mt 0 in
let k = split_offset (MT?.offset mtv) k in
let j = split_offset (MT?.offset mtv) j in
b <==> MTH.mt_verify #(U32.v hsz) #hash_spec (U32.v k) (U32.v j)
(lift_path h0 mtr p) (Rgl?.r_repr (hreg hsz) h0 rt)))) | [] | MerkleTree.Low.mt_verify | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
mt: MerkleTree.Low.const_mt_p ->
k: EverCrypt.Helpers.uint64_t ->
j: EverCrypt.Helpers.uint64_t ->
mtr: FStar.Monotonic.HyperHeap.rid ->
p: MerkleTree.Low.const_path_p ->
rt: MerkleTree.Low.Datastructures.hash
-> FStar.HyperStack.ST.ST Prims.bool | {
"end_col": 3,
"end_line": 3005,
"start_col": 45,
"start_line": 2976
} |
FStar.HyperStack.ST.ST | val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j)) | [
{
"abbrev": false,
"full_module": "MerkleTree.Low.VectorExtras",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Hashfunctions",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree.Low.Datastructures",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": true,
"full_module": "MerkleTree.Spec",
"short_module": "MTS"
},
{
"abbrev": true,
"full_module": "MerkleTree.New.High",
"short_module": "MTH"
},
{
"abbrev": true,
"full_module": "FStar.UInt64",
"short_module": "U64"
},
{
"abbrev": true,
"full_module": "FStar.UInt32",
"short_module": "U32"
},
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "S"
},
{
"abbrev": true,
"full_module": "LowStar.Regional.Instances",
"short_module": "RVI"
},
{
"abbrev": true,
"full_module": "LowStar.RVector",
"short_module": "RV"
},
{
"abbrev": true,
"full_module": "LowStar.Vector",
"short_module": "V"
},
{
"abbrev": true,
"full_module": "LowStar.ConstBuffer",
"short_module": "CB"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperHeap",
"short_module": "HH"
},
{
"abbrev": true,
"full_module": "FStar.Monotonic.HyperStack",
"short_module": "MHS"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "HST"
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": false,
"full_module": "LowStar.Regional.Instances",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.RVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Regional",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Vector",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.BufferOps",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowStar.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Integers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.All",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverCrypt.Helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "MerkleTree",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun =
let hh0 = HST.get () in
if j = 0ul then begin
assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq hh0 hs)
(U32.v i) (U32.v j));
let hh1 = HST.get() in
assert (MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else
let ofs = offset_of i in
begin
(if j % 2ul = 0ul
then begin
Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
// correctness
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc))
end
else begin
if actd
then begin
RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
// memory safety
Rgl?.r_sep (hreg hsz) acc
(B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
RV.rv_inv_preserved
(V.get hh0 hs lv) (B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (V.frameOf rhs))
hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
// correctness
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
// memory safety
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc))
end
else begin
mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
// memory safety
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j
(B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved
rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
// correctness
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
end;
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv)
(Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then (Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2) (U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc) true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz) #hash_spec
(U32.v lv) (Rgl?.r_repr (hvvreg hsz) hh0 hs) (Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j) (Rgl?.r_repr (hreg hsz) hh0 acc) actd;
assert (MTH.construct_rhs #(U32.v hsz) #hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc))
end)
end | val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j))
let rec construct_rhs #hsz #hash_spec lv hs rhs i j acc actd hash_fun = | true | null | false | let hh0 = HST.get () in
if j = 0ul
then
(assert (RV.rv_inv hh0 hs);
assert (mt_safe_elts #hsz hh0 lv hs i j);
mt_safe_elts_spec #hsz hh0 lv hs 0ul 0ul;
assert (MTH.hs_wf_elts #(U32.v hsz) (U32.v lv) (RV.as_seq hh0 hs) (U32.v i) (U32.v j));
let hh1 = HST.get () in
assert (MTH.construct_rhs #(U32.v hsz)
#(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc)))
else
let ofs = offset_of i in
(if j % 2ul = 0ul
then
(Math.Lemmas.pow2_double_mult (32 - U32.v lv - 1);
mt_safe_elts_rec #hsz hh0 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc actd hash_fun;
let hh1 = HST.get () in
mt_safe_elts_spec #hsz hh0 lv hs i j;
MTH.construct_rhs_even #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd;
assert (MTH.construct_rhs #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh1 rhs, Rgl?.r_repr (hreg hsz) hh1 acc)))
else
(if actd
then
(RV.assign_copy (hcpy hsz) rhs lv acc;
let hh1 = HST.get () in
Rgl?.r_sep (hreg hsz) acc (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved hs (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.as_seq_preserved hs (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
RV.rv_inv_preserved (V.get hh0 hs lv)
(B.loc_all_regions_from false (V.frameOf rhs))
hh0
hh1;
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (B.loc_all_regions_from false (V.frameOf rhs)) hh0 hh1;
mt_safe_elts_head hh1 lv hs i j;
hash_vv_rv_inv_r_inv hh1 hs lv (j - 1ul - ofs);
assert (S.equal (RV.as_seq hh1 rhs)
(S.upd (RV.as_seq hh0 rhs) (U32.v lv) (Rgl?.r_repr (hreg hsz) hh0 acc)));
hash_fun (V.index (V.index hs lv) (j - 1ul - ofs)) acc acc;
let hh2 = HST.get () in
mt_safe_elts_preserved lv hs i j (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_inv_preserved rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved rhs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh2 acc ==
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)))
else
(mt_safe_elts_head hh0 lv hs i j;
hash_vv_rv_inv_r_inv hh0 hs lv (j - 1ul - ofs);
hash_vv_rv_inv_disjoint hh0 hs lv (j - 1ul - ofs) (B.frameOf acc);
Cpy?.copy (hcpy hsz) hsz (V.index (V.index hs lv) (j - 1ul - ofs)) acc;
let hh1 = HST.get () in
V.loc_vector_within_included hs lv (V.size_of hs);
mt_safe_elts_preserved lv hs i j (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.rv_inv_preserved rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved hs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
RV.as_seq_preserved rhs (B.loc_all_regions_from false (B.frameOf acc)) hh0 hh1;
hash_vv_as_seq_get_index hh0 hs lv (j - 1ul - ofs);
assert (Rgl?.r_repr (hreg hsz) hh1 acc ==
S.index (S.index (RV.as_seq hh0 hs) (U32.v lv)) (U32.v j - 1 - U32.v ofs)));
let hh3 = HST.get () in
assert (S.equal (RV.as_seq hh3 hs) (RV.as_seq hh0 hs));
assert (S.equal (RV.as_seq hh3 rhs)
(if actd
then S.upd (RV.as_seq hh0 rhs) (U32.v lv) (Rgl?.r_repr (hreg hsz) hh0 acc)
else RV.as_seq hh0 rhs));
assert (Rgl?.r_repr (hreg hsz) hh3 acc ==
(if actd
then
(Ghost.reveal hash_spec) (S.index (S.index (RV.as_seq hh0 hs) (U32.v lv))
(U32.v j - 1 - U32.v ofs))
(Rgl?.r_repr (hreg hsz) hh0 acc)
else S.index (S.index (RV.as_seq hh0 hs) (U32.v lv)) (U32.v j - 1 - U32.v ofs)));
mt_safe_elts_rec hh3 lv hs i j;
construct_rhs #hsz #hash_spec (lv + 1ul) hs rhs (i / 2ul) (j / 2ul) acc true hash_fun;
let hh4 = HST.get () in
mt_safe_elts_spec hh3 (lv + 1ul) hs (i / 2ul) (j / 2ul);
assert (MTH.construct_rhs #(U32.v hsz)
#hash_spec
(U32.v lv + 1)
(Rgl?.r_repr (hvvreg hsz) hh3 hs)
(Rgl?.r_repr (hvreg hsz) hh3 rhs)
(U32.v i / 2)
(U32.v j / 2)
(Rgl?.r_repr (hreg hsz) hh3 acc)
true ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc));
mt_safe_elts_spec hh0 lv hs i j;
MTH.construct_rhs_odd #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd;
assert (MTH.construct_rhs #(U32.v hsz)
#hash_spec
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) hh0 hs)
(Rgl?.r_repr (hvreg hsz) hh0 rhs)
(U32.v i)
(U32.v j)
(Rgl?.r_repr (hreg hsz) hh0 acc)
actd ==
(Rgl?.r_repr (hvreg hsz) hh4 rhs, Rgl?.r_repr (hreg hsz) hh4 acc)))) | {
"checked_file": "MerkleTree.Low.fst.checked",
"dependencies": [
"prims.fst.checked",
"MerkleTree.Spec.fst.checked",
"MerkleTree.New.High.fst.checked",
"MerkleTree.Low.VectorExtras.fst.checked",
"MerkleTree.Low.Hashfunctions.fst.checked",
"MerkleTree.Low.Datastructures.fst.checked",
"LowStar.Vector.fst.checked",
"LowStar.RVector.fst.checked",
"LowStar.Regional.Instances.fst.checked",
"LowStar.Regional.fst.checked",
"LowStar.ConstBuffer.fsti.checked",
"LowStar.BufferOps.fst.checked",
"LowStar.Buffer.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.Properties.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Monotonic.HyperStack.fsti.checked",
"FStar.Monotonic.HyperHeap.fsti.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.Integers.fst.checked",
"FStar.Int.Cast.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked",
"FStar.Ghost.fsti.checked",
"FStar.All.fst.checked",
"EverCrypt.Helpers.fsti.checked"
],
"interface_file": false,
"source_file": "MerkleTree.Low.fst"
} | [
""
] | [
"MerkleTree.Low.Datastructures.hash_size_t",
"FStar.Ghost.erased",
"MerkleTree.Spec.hash_fun_t",
"FStar.UInt32.v",
"LowStar.Vector.uint32_t",
"Prims.b2t",
"FStar.Integers.op_Less_Equals",
"FStar.Integers.Unsigned",
"FStar.Integers.W32",
"MerkleTree.Low.merkle_tree_size_lg",
"MerkleTree.Low.Datastructures.hash_vv",
"Prims.op_Equality",
"LowStar.Vector.size_of",
"MerkleTree.Low.Datastructures.hash_vec",
"MerkleTree.Low.Datastructures.hash",
"MerkleTree.Low.index_t",
"Prims.op_AmpAmp",
"FStar.Integers.op_Less",
"FStar.Integers.Signed",
"FStar.Integers.Winfinite",
"Prims.pow2",
"FStar.Integers.op_Subtraction",
"Prims.bool",
"MerkleTree.Low.Hashfunctions.hash_fun_t",
"FStar.UInt32.t",
"FStar.UInt32.__uint_to_t",
"Prims._assert",
"Prims.eq2",
"FStar.Pervasives.Native.tuple2",
"MerkleTree.New.High.hashes",
"Prims.int",
"FStar.Seq.Base.length",
"MerkleTree.New.High.hash",
"MerkleTree.New.High.construct_rhs",
"FStar.Ghost.reveal",
"LowStar.Regional.__proj__Rgl__item__r_repr",
"LowStar.Regional.regional",
"MerkleTree.Low.Datastructures.hvvreg",
"MerkleTree.Low.Datastructures.hvreg",
"MerkleTree.Low.Datastructures.hreg",
"FStar.Pervasives.Native.Mktuple2",
"Prims.unit",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get",
"MerkleTree.New.High.hs_wf_elts",
"LowStar.RVector.as_seq",
"MerkleTree.Low.mt_safe_elts_spec",
"MerkleTree.Low.mt_safe_elts",
"LowStar.RVector.rv_inv",
"FStar.Integers.op_Percent",
"MerkleTree.New.High.construct_rhs_even",
"MerkleTree.Low.construct_rhs",
"FStar.Integers.op_Plus",
"FStar.Integers.op_Slash",
"MerkleTree.Low.mt_safe_elts_rec",
"FStar.Math.Lemmas.pow2_double_mult",
"MerkleTree.New.High.construct_rhs_odd",
"Spec.Hash.Definitions.bytes",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Prims.op_GreaterThan",
"Lib.IntTypes.uint8",
"FStar.Seq.Base.index",
"LowStar.Regional.__proj__Rgl__item__repr",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.upd",
"FStar.Seq.Base.seq",
"MerkleTree.Low.Datastructures.hash_vv_as_seq_get_index",
"LowStar.RVector.as_seq_preserved",
"LowStar.Monotonic.Buffer.loc_region_only",
"LowStar.Monotonic.Buffer.frameOf",
"LowStar.Buffer.trivial_preorder",
"LowStar.RVector.rv_inv_preserved",
"MerkleTree.Low.mt_safe_elts_preserved",
"LowStar.Monotonic.Buffer.loc_all_regions_from",
"LowStar.Vector.index",
"LowStar.Vector.vector",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_r_inv",
"MerkleTree.Low.mt_safe_elts_head",
"LowStar.Vector.frameOf",
"LowStar.Vector.loc_vector_within_included",
"LowStar.Vector.get",
"LowStar.Regional.__proj__Rgl__item__r_sep",
"LowStar.RVector.assign_copy",
"MerkleTree.Low.Datastructures.hcpy",
"LowStar.RVector.__proj__Cpy__item__copy",
"MerkleTree.Low.Datastructures.hash_vv_rv_inv_disjoint",
"MerkleTree.Low.offset_of"
] | [] | module MerkleTree.Low
open EverCrypt.Helpers
open FStar.All
open FStar.Integers
open FStar.Mul
open LowStar.Buffer
open LowStar.BufferOps
open LowStar.Vector
open LowStar.Regional
open LowStar.RVector
open LowStar.Regional.Instances
module HS = FStar.HyperStack
module HST = FStar.HyperStack.ST
module MHS = FStar.Monotonic.HyperStack
module HH = FStar.Monotonic.HyperHeap
module B = LowStar.Buffer
module CB = LowStar.ConstBuffer
module V = LowStar.Vector
module RV = LowStar.RVector
module RVI = LowStar.Regional.Instances
module S = FStar.Seq
module U32 = FStar.UInt32
module U64 = FStar.UInt64
module MTH = MerkleTree.New.High
module MTS = MerkleTree.Spec
open Lib.IntTypes
open MerkleTree.Low.Datastructures
open MerkleTree.Low.Hashfunctions
open MerkleTree.Low.VectorExtras
#set-options "--z3rlimit 10 --initial_fuel 0 --max_fuel 0 --initial_ifuel 0 --max_ifuel 0"
type const_pointer (a:Type0) = b:CB.const_buffer a{CB.length b == 1 /\ CB.qual_of b == CB.MUTABLE}
/// Low-level Merkle tree data structure
///
// NOTE: because of a lack of 64-bit LowStar.Buffer support, currently
// we cannot change below to some other types.
type index_t = uint32_t
let uint32_32_max = 4294967295ul
inline_for_extraction
let uint32_max = 4294967295UL
let uint64_max = 18446744073709551615UL
let offset_range_limit = uint32_max
type offset_t = uint64_t
inline_for_extraction noextract unfold let u32_64 = Int.Cast.uint32_to_uint64
inline_for_extraction noextract unfold let u64_32 = Int.Cast.uint64_to_uint32
private inline_for_extraction
let offsets_connect (x:offset_t) (y:offset_t): Tot bool = y >= x && (y - x) <= offset_range_limit
private inline_for_extraction
let split_offset (tree:offset_t) (index:offset_t{offsets_connect tree index}): Tot index_t =
[@inline_let] let diff = U64.sub_mod index tree in
assert (diff <= offset_range_limit);
Int.Cast.uint64_to_uint32 diff
private inline_for_extraction
let add64_fits (x:offset_t) (i:index_t): Tot bool = uint64_max - x >= (u32_64 i)
private inline_for_extraction
let join_offset (tree:offset_t) (i:index_t{add64_fits tree i}): Tot (r:offset_t{offsets_connect tree r}) =
U64.add tree (u32_64 i)
inline_for_extraction val merkle_tree_size_lg: uint32_t
let merkle_tree_size_lg = 32ul
// A Merkle tree `MT i j hs rhs_ok rhs` stores all necessary hashes to generate
// a Merkle path for each element from the index `i` to `j-1`.
// - Parameters
// `hs`: a 2-dim store for hashes, where `hs[0]` contains leaf hash values.
// `rhs_ok`: to check the rightmost hashes are up-to-date
// `rhs`: a store for "rightmost" hashes, manipulated only when required to
// calculate some merkle paths that need the rightmost hashes
// as a part of them.
// `mroot`: during the construction of `rhs` we can also calculate the Merkle
// root of the tree. If `rhs_ok` is true then it has the up-to-date
// root value.
noeq type merkle_tree =
| MT: hash_size:hash_size_t ->
offset:offset_t ->
i:index_t -> j:index_t{i <= j /\ add64_fits offset j} ->
hs:hash_vv hash_size {V.size_of hs = merkle_tree_size_lg} ->
rhs_ok:bool ->
rhs:hash_vec #hash_size {V.size_of rhs = merkle_tree_size_lg} ->
mroot:hash #hash_size ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
merkle_tree
type mt_p = B.pointer merkle_tree
type const_mt_p = const_pointer merkle_tree
inline_for_extraction
let merkle_tree_conditions (#hsz:Ghost.erased hash_size_t) (offset:uint64_t) (i j:uint32_t) (hs:hash_vv hsz) (rhs_ok:bool) (rhs:hash_vec #hsz) (mroot:hash #hsz): Tot bool =
j >= i && add64_fits offset j &&
V.size_of hs = merkle_tree_size_lg &&
V.size_of rhs = merkle_tree_size_lg
// The maximum number of currently held elements in the tree is (2^32 - 1).
// cwinter: even when using 64-bit indices, we fail if the underlying 32-bit
// vector is full; this can be fixed if necessary.
private inline_for_extraction
val mt_not_full_nst: mtv:merkle_tree -> Tot bool
let mt_not_full_nst mtv = MT?.j mtv < uint32_32_max
val mt_not_full: HS.mem -> mt_p -> GTot bool
let mt_not_full h mt = mt_not_full_nst (B.get h mt 0)
/// (Memory) Safety
val offset_of: i:index_t -> Tot index_t
let offset_of i = if i % 2ul = 0ul then i else i - 1ul
// `mt_safe_elts` says that it is safe to access an element from `i` to `j - 1`
// at level `lv` in the Merkle tree, i.e., hs[lv][k] (i <= k < j) is a valid
// element.
inline_for_extraction noextract
val mt_safe_elts:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
GTot Type0 (decreases (32 - U32.v lv))
let rec mt_safe_elts #hsz h lv hs i j =
if lv = merkle_tree_size_lg then true
else (let ofs = offset_of i in
V.size_of (V.get h hs lv) == j - ofs /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul))
#push-options "--initial_fuel 1 --max_fuel 1"
val mt_safe_elts_constr:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (V.size_of (V.get h hs lv) == j - offset_of i /\
mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
(ensures (mt_safe_elts #hsz h lv hs i j))
let mt_safe_elts_constr #_ h lv hs i j = ()
val mt_safe_elts_head:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (V.size_of (V.get h hs lv) == j - offset_of i))
let mt_safe_elts_head #_ h lv hs i j = ()
val mt_safe_elts_rec:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
Lemma (requires (mt_safe_elts #hsz h lv hs i j))
(ensures (mt_safe_elts #hsz h (lv + 1ul) hs (i / 2ul) (j / 2ul)))
let mt_safe_elts_rec #_ h lv hs i j = ()
val mt_safe_elts_init:
#hsz:hash_size_t ->
h:HS.mem -> lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
Lemma (requires (V.forall_ h hs lv (V.size_of hs)
(fun hv -> V.size_of hv = 0ul)))
(ensures (mt_safe_elts #hsz h lv hs 0ul 0ul))
(decreases (32 - U32.v lv))
let rec mt_safe_elts_init #hsz h lv hs =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_init #hsz h (lv + 1ul) hs
#pop-options
val mt_safe_elts_preserved:
#hsz:hash_size_t ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t -> j:index_t{j >= i} ->
p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.live h0 hs /\
mt_safe_elts #hsz h0 lv hs i j /\
loc_disjoint p (V.loc_vector_within hs lv (V.size_of hs)) /\
modifies p h0 h1))
(ensures (mt_safe_elts #hsz h1 lv hs i j))
(decreases (32 - U32.v lv))
[SMTPat (V.live h0 hs);
SMTPat (mt_safe_elts #hsz h0 lv hs i j);
SMTPat (loc_disjoint p (RV.loc_rvector hs));
SMTPat (modifies p h0 h1)]
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_preserved #hsz lv hs i j p h0 h1 =
if lv = merkle_tree_size_lg then ()
else (V.get_preserved hs lv p h0 h1;
mt_safe_elts_preserved #hsz (lv + 1ul) hs (i / 2ul) (j / 2ul) p h0 h1)
#pop-options
// `mt_safe` is the invariant of a Merkle tree through its lifetime.
// It includes liveness, regionality, disjointness (to each data structure),
// and valid element access (`mt_safe_elts`).
inline_for_extraction noextract
val mt_safe: HS.mem -> mt_p -> GTot Type0
let mt_safe h mt =
B.live h mt /\ B.freeable mt /\
(let mtv = B.get h mt 0 in
// Liveness & Accessibility
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) /\
// Regionality
HH.extends (V.frameOf (MT?.hs mtv)) (B.frameOf mt) /\
HH.extends (V.frameOf (MT?.rhs mtv)) (B.frameOf mt) /\
HH.extends (B.frameOf (MT?.mroot mtv)) (B.frameOf mt) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (V.frameOf (MT?.rhs mtv)) /\
HH.disjoint (V.frameOf (MT?.hs mtv)) (B.frameOf (MT?.mroot mtv)) /\
HH.disjoint (V.frameOf (MT?.rhs mtv)) (B.frameOf (MT?.mroot mtv)))
// Since a Merkle tree satisfies regionality, it's ok to take all regions from
// a tree pointer as a location of the tree.
val mt_loc: mt_p -> GTot loc
let mt_loc mt = B.loc_all_regions_from false (B.frameOf mt)
val mt_safe_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (B.get h0 mt 0 == B.get h1 mt 0 /\
mt_safe h1 mt))
let mt_safe_preserved mt p h0 h1 =
assert (loc_includes (mt_loc mt) (B.loc_buffer mt));
let mtv = B.get h0 mt 0 in
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt) (RV.loc_rvector (MT?.rhs mtv)));
assert (loc_includes (mt_loc mt) (V.loc_vector (MT?.hs mtv)));
assert (loc_includes (mt_loc mt)
(B.loc_all_regions_from false (B.frameOf (MT?.mroot mtv))));
RV.rv_inv_preserved (MT?.hs mtv) p h0 h1;
RV.rv_inv_preserved (MT?.rhs mtv) p h0 h1;
Rgl?.r_sep (hreg (MT?.hash_size mtv)) (MT?.mroot mtv) p h0 h1;
V.loc_vector_within_included (MT?.hs mtv) 0ul (V.size_of (MT?.hs mtv));
mt_safe_elts_preserved 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv) p h0 h1
/// Lifting to a high-level Merkle tree structure
val mt_safe_elts_spec:
#hsz:hash_size_t ->
h:HS.mem ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{j >= i} ->
Lemma (requires (RV.rv_inv h hs /\
mt_safe_elts #hsz h lv hs i j))
(ensures (MTH.hs_wf_elts #(U32.v hsz)
(U32.v lv) (RV.as_seq h hs)
(U32.v i) (U32.v j)))
(decreases (32 - U32.v lv))
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let rec mt_safe_elts_spec #_ h lv hs i j =
if lv = merkle_tree_size_lg then ()
else mt_safe_elts_spec h (lv + 1ul) hs (i / 2ul) (j / 2ul)
#pop-options
val merkle_tree_lift:
h:HS.mem ->
mtv:merkle_tree{
RV.rv_inv h (MT?.hs mtv) /\
RV.rv_inv h (MT?.rhs mtv) /\
Rgl?.r_inv (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv) /\
mt_safe_elts #(MT?.hash_size mtv) h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv)} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size mtv)) {MTH.mt_wf_elts #_ r})
let merkle_tree_lift h mtv =
mt_safe_elts_spec h 0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv);
MTH.MT #(U32.v (MT?.hash_size mtv))
(U32.v (MT?.i mtv))
(U32.v (MT?.j mtv))
(RV.as_seq h (MT?.hs mtv))
(MT?.rhs_ok mtv)
(RV.as_seq h (MT?.rhs mtv))
(Rgl?.r_repr (hreg (MT?.hash_size mtv)) h (MT?.mroot mtv))
(Ghost.reveal (MT?.hash_spec mtv))
val mt_lift:
h:HS.mem -> mt:mt_p{mt_safe h mt} ->
GTot (r:MTH.merkle_tree #(U32.v (MT?.hash_size (B.get h mt 0))) {MTH.mt_wf_elts #_ r})
let mt_lift h mt =
merkle_tree_lift h (B.get h mt 0)
val mt_preserved:
mt:mt_p -> p:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (mt_safe h0 mt /\
loc_disjoint p (mt_loc mt) /\
modifies p h0 h1))
(ensures (mt_safe_preserved mt p h0 h1;
mt_lift h0 mt == mt_lift h1 mt))
let mt_preserved mt p h0 h1 =
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer mt));
B.modifies_buffer_elim mt p h0 h1;
assert (B.get h0 mt 0 == B.get h1 mt 0);
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.hs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(RV.loc_rvector (MT?.rhs (B.get h0 mt 0))));
assert (loc_includes (B.loc_all_regions_from false (B.frameOf mt))
(B.loc_buffer (MT?.mroot (B.get h0 mt 0))));
RV.as_seq_preserved (MT?.hs (B.get h0 mt 0)) p h0 h1;
RV.as_seq_preserved (MT?.rhs (B.get h0 mt 0)) p h0 h1;
B.modifies_buffer_elim (MT?.mroot (B.get h0 mt 0)) p h0 h1
/// Construction
// Note that the public function for creation is `mt_create` defined below,
// which builds a tree with an initial hash.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val create_empty_mt:
hash_size:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hash_size)) ->
hash_fun:hash_fun_t #hash_size #hash_spec ->
r:HST.erid ->
HST.ST mt_p
(requires (fun _ -> true))
(ensures (fun h0 mt h1 ->
let dmt = B.get h1 mt 0 in
// memory safety
B.frameOf mt = r /\
modifies (mt_loc mt) h0 h1 /\
mt_safe h1 mt /\
mt_not_full h1 mt /\
// correctness
MT?.hash_size dmt = hash_size /\
MT?.offset dmt = 0UL /\
merkle_tree_lift h1 dmt == MTH.create_empty_mt #_ #(Ghost.reveal hash_spec) ()))
let create_empty_mt hsz hash_spec hash_fun r =
[@inline_let] let hrg = hreg hsz in
[@inline_let] let hvrg = hvreg hsz in
[@inline_let] let hvvrg = hvvreg hsz in
let hs_region = HST.new_region r in
let hs = RV.alloc_rid hvrg merkle_tree_size_lg hs_region in
let h0 = HST.get () in
mt_safe_elts_init #hsz h0 0ul hs;
let rhs_region = HST.new_region r in
let rhs = RV.alloc_rid hrg merkle_tree_size_lg rhs_region in
let h1 = HST.get () in
assert (RV.as_seq h1 rhs == S.create 32 (MTH.hash_init #(U32.v hsz)));
RV.rv_inv_preserved hs (V.loc_vector rhs) h0 h1;
RV.as_seq_preserved hs (V.loc_vector rhs) h0 h1;
V.loc_vector_within_included hs 0ul (V.size_of hs);
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul (V.loc_vector rhs) h0 h1;
let mroot_region = HST.new_region r in
let mroot = rg_alloc hrg mroot_region in
let h2 = HST.get () in
RV.as_seq_preserved hs loc_none h1 h2;
RV.as_seq_preserved rhs loc_none h1 h2;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h1 h2;
let mt = B.malloc r (MT hsz 0UL 0ul 0ul hs false rhs mroot hash_spec hash_fun) 1ul in
let h3 = HST.get () in
RV.as_seq_preserved hs loc_none h2 h3;
RV.as_seq_preserved rhs loc_none h2 h3;
Rgl?.r_sep hrg mroot loc_none h2 h3;
mt_safe_elts_preserved #hsz 0ul hs 0ul 0ul loc_none h2 h3;
mt
#pop-options
/// Destruction (free)
val mt_free: mt:mt_p ->
HST.ST unit
(requires (fun h0 -> mt_safe h0 mt))
(ensures (fun h0 _ h1 -> modifies (mt_loc mt) h0 h1))
#push-options "--z3rlimit 100"
let mt_free mt =
let mtv = !*mt in
RV.free (MT?.hs mtv);
RV.free (MT?.rhs mtv);
[@inline_let] let rg = hreg (MT?.hash_size mtv) in
rg_free rg (MT?.mroot mtv);
B.free mt
#pop-options
/// Insertion
private
val as_seq_sub_upd:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector #a #rst rg ->
i:uint32_t{i < V.size_of rv} -> v:Rgl?.repr rg ->
Lemma (requires (RV.rv_inv h rv))
(ensures (S.equal (S.upd (RV.as_seq h rv) (U32.v i) v)
(S.append
(RV.as_seq_sub h rv 0ul i)
(S.cons v (RV.as_seq_sub h rv (i + 1ul) (V.size_of rv))))))
#push-options "--z3rlimit 20"
let as_seq_sub_upd #a #rst #rg h rv i v =
Seq.Properties.slice_upd (RV.as_seq h rv) 0 (U32.v i) (U32.v i) v;
Seq.Properties.slice_upd (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)) (U32.v i) v;
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) 0 (U32.v i);
assert (S.equal (S.slice (RV.as_seq h rv) 0 (U32.v i))
(RV.as_seq_sub h rv 0ul i));
RV.as_seq_seq_slice rg h (V.as_seq h rv)
0 (U32.v (V.size_of rv)) (U32.v i + 1) (U32.v (V.size_of rv));
assert (S.equal (S.slice (RV.as_seq h rv) (U32.v i + 1) (U32.v (V.size_of rv)))
(RV.as_seq_sub h rv (i + 1ul) (V.size_of rv)));
assert (S.index (S.upd (RV.as_seq h rv) (U32.v i) v) (U32.v i) == v)
#pop-options
// `hash_vv_insert_copy` inserts a hash element at a level `lv`, by copying
// and pushing its content to `hs[lv]`. For detailed insertion procedure, see
// `insert_` and `mt_insert`.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1"
private
inline_for_extraction
val hash_vv_insert_copy:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (V.frameOf hs) (B.frameOf v) /\
mt_safe_elts #hsz h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 v /\
V.size_of (V.get h1 hs lv) == j + 1ul - offset_of (Ghost.reveal i) /\
V.size_of (V.get h1 hs lv) == V.size_of (V.get h0 hs lv) + 1ul /\
mt_safe_elts #hsz h1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul) /\
RV.rv_loc_elems h0 hs (lv + 1ul) (V.size_of hs) ==
RV.rv_loc_elems h1 hs (lv + 1ul) (V.size_of hs) /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.hashess_insert
(U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 v))) /\
S.equal (S.index (RV.as_seq h1 hs) (U32.v lv))
(S.snoc (S.index (RV.as_seq h0 hs) (U32.v lv))
(Rgl?.r_repr (hreg hsz) h0 v))))
let hash_vv_insert_copy #hsz lv i j hs v =
let hh0 = HST.get () in
mt_safe_elts_rec hh0 lv hs (Ghost.reveal i) j;
/// 1) Insert an element at the level `lv`, where the new vector is not yet
/// connected to `hs`.
let ihv = RV.insert_copy (hcpy hsz) (V.index hs lv) v in
let hh1 = HST.get () in
// 1-0) Basic disjointness conditions
V.forall2_forall_left hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.forall2_forall_right hh0 hs 0ul (V.size_of hs) lv
(fun b1 b2 -> HH.disjoint (Rgl?.region_of (hvreg hsz) b1)
(Rgl?.region_of (hvreg hsz) b2));
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
// 1-1) For the `modifies` postcondition.
assert (modifies (RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv)) hh0 hh1);
// 1-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1;
// 1-3) For `mt_safe_elts`
assert (V.size_of ihv == j + 1ul - offset_of (Ghost.reveal i)); // head updated
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(RV.loc_rvector (V.get hh0 hs lv)) hh0 hh1; // tail not yet
// 1-4) For the `rv_inv` postcondition
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs)) 0 (U32.v lv) (U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v lv);
RV.rv_elems_inv_preserved
hs 0ul lv (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs 0ul lv);
RV.rs_loc_elems_elem_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
0 (U32.v (V.size_of hs))
(U32.v lv + 1) (U32.v (V.size_of hs))
(U32.v lv);
RV.rs_loc_elems_parent_disj
(hvreg hsz) (V.as_seq hh0 hs) (V.frameOf hs)
(U32.v lv + 1) (U32.v (V.size_of hs));
RV.rv_elems_inv_preserved
hs (lv + 1ul) (V.size_of hs) (RV.loc_rvector (V.get hh0 hs lv))
hh0 hh1;
assert (RV.rv_elems_inv hh1 hs (lv + 1ul) (V.size_of hs));
// assert (rv_itself_inv hh1 hs);
// assert (elems_reg hh1 hs);
// 1-5) Correctness
assert (S.equal (RV.as_seq hh1 ihv)
(S.snoc (RV.as_seq hh0 (V.get hh0 hs lv)) (Rgl?.r_repr (hreg hsz) hh0 v)));
/// 2) Assign the updated vector to `hs` at the level `lv`.
RV.assign hs lv ihv;
let hh2 = HST.get () in
// 2-1) For the `modifies` postcondition.
assert (modifies (V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2);
assert (modifies (loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) hh0 hh2);
// 2-2) Preservation
Rgl?.r_sep (hreg hsz) v (RV.loc_rvector hs) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-3) For `mt_safe_elts`
assert (V.size_of (V.get hh2 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(V.loc_vector_within hs lv (lv + 1ul)) hh1 hh2;
// 2-4) Correctness
RV.as_seq_sub_preserved hs 0ul lv (loc_rvector ihv) hh0 hh1;
RV.as_seq_sub_preserved hs (lv + 1ul) merkle_tree_size_lg (loc_rvector ihv) hh0 hh1;
assert (S.equal (RV.as_seq hh2 hs)
(S.append
(RV.as_seq_sub hh0 hs 0ul lv)
(S.cons (RV.as_seq hh1 ihv)
(RV.as_seq_sub hh0 hs (lv + 1ul) merkle_tree_size_lg))));
as_seq_sub_upd hh0 hs lv (RV.as_seq hh1 ihv)
#pop-options
private
val insert_index_helper_even:
lv:uint32_t{lv < merkle_tree_size_lg} ->
j:index_t{U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul <> 1ul))
(ensures (U32.v j % 2 <> 1 /\ j / 2ul == (j + 1ul) / 2ul))
let insert_index_helper_even lv j = ()
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
private
val insert_index_helper_odd:
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && U32.v j < pow2 (32 - U32.v lv) - 1} ->
Lemma (requires (j % 2ul = 1ul /\
j < uint32_32_max))
(ensures (U32.v j % 2 = 1 /\
U32.v (j / 2ul) < pow2 (32 - U32.v (lv + 1ul)) - 1 /\
(j + 1ul) / 2ul == j / 2ul + 1ul /\
j - offset_of i > 0ul))
let insert_index_helper_odd lv i j = ()
#pop-options
private
val loc_union_assoc_4:
a:loc -> b:loc -> c:loc -> d:loc ->
Lemma (loc_union (loc_union a b) (loc_union c d) ==
loc_union (loc_union a c) (loc_union b d))
let loc_union_assoc_4 a b c d =
loc_union_assoc (loc_union a b) c d;
loc_union_assoc a b c;
loc_union_assoc a c b;
loc_union_assoc (loc_union a c) b d
private
val insert_modifies_rec_helper:
#hsz:hash_size_t ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
aloc:loc ->
h:HS.mem ->
Lemma (loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
aloc)
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc) ==
loc_union
(loc_union
(RV.rv_loc_elems h hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
aloc)
#push-options "--z3rlimit 100 --initial_fuel 2 --max_fuel 2"
let insert_modifies_rec_helper #hsz lv hs aloc h =
assert (V.loc_vector_within hs lv (V.size_of hs) ==
loc_union (V.loc_vector_within hs lv (lv + 1ul))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)));
RV.rs_loc_elems_rec_inverse (hvreg hsz) (V.as_seq h hs) (U32.v lv) (U32.v (V.size_of hs));
assert (RV.rv_loc_elems h hs lv (V.size_of hs) ==
loc_union (RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs)));
// Applying some association rules...
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))) aloc
(loc_union
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc);
loc_union_assoc
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))) aloc aloc;
loc_union_assoc
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(loc_union
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
aloc;
loc_union_assoc_4
(RV.rs_loc_elem (hvreg hsz) (V.as_seq h hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems h hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
#pop-options
private
val insert_modifies_union_loc_weakening:
l1:loc -> l2:loc -> l3:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (modifies l1 h0 h1))
(ensures (modifies (loc_union (loc_union l1 l2) l3) h0 h1))
let insert_modifies_union_loc_weakening l1 l2 l3 h0 h1 =
B.loc_includes_union_l l1 l2 l1;
B.loc_includes_union_l (loc_union l1 l2) l3 (loc_union l1 l2)
private
val insert_snoc_last_helper:
#a:Type -> s:S.seq a{S.length s > 0} -> v:a ->
Lemma (S.index (S.snoc s v) (S.length s - 1) == S.last s)
let insert_snoc_last_helper #a s v = ()
private
val rv_inv_rv_elems_reg:
#a:Type0 -> #rst:Type -> #rg:regional rst a ->
h:HS.mem -> rv:rvector rg ->
i:uint32_t -> j:uint32_t{i <= j && j <= V.size_of rv} ->
Lemma (requires (RV.rv_inv h rv))
(ensures (RV.rv_elems_reg h rv i j))
let rv_inv_rv_elems_reg #a #rst #rg h rv i j = ()
// `insert_` recursively inserts proper hashes to each level `lv` by
// accumulating a compressed hash. For example, if there are three leaf elements
// in the tree, `insert_` will change `hs` as follow:
// (`hij` is a compressed hash from `hi` to `hj`)
//
// BEFORE INSERTION AFTER INSERTION
// lv
// 0 h0 h1 h2 ====> h0 h1 h2 h3
// 1 h01 h01 h23
// 2 h03
//
private
val insert_:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv < merkle_tree_size_lg} ->
i:Ghost.erased index_t ->
j:index_t{
Ghost.reveal i <= j &&
U32.v j < pow2 (32 - U32.v lv) - 1 &&
j < uint32_32_max} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
acc:hash #hsz ->
hash_fun:hash_fun_t #hsz #hash_spec ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (V.frameOf hs) (B.frameOf acc) /\
mt_safe_elts h0 lv hs (Ghost.reveal i) j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(loc_union
(RV.rv_loc_elems h0 hs lv (V.size_of hs))
(V.loc_vector_within hs lv (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 hs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
mt_safe_elts h1 lv hs (Ghost.reveal i) (j + 1ul) /\
// correctness
(mt_safe_elts_spec h0 lv hs (Ghost.reveal i) j;
S.equal (RV.as_seq h1 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq h0 hs) (Rgl?.r_repr (hreg hsz) h0 acc)))))
(decreases (U32.v j))
#push-options "--z3rlimit 800 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec insert_ #hsz #hash_spec lv i j hs acc hash_fun =
let hh0 = HST.get () in
hash_vv_insert_copy lv i j hs acc;
let hh1 = HST.get () in
// Base conditions
V.loc_vector_within_included hs lv (lv + 1ul);
V.loc_vector_within_included hs (lv + 1ul) (V.size_of hs);
V.loc_vector_within_disjoint hs lv (lv + 1ul) (lv + 1ul) (V.size_of hs);
assert (V.size_of (V.get hh1 hs lv) == j + 1ul - offset_of (Ghost.reveal i));
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul));
if j % 2ul = 1ul
then (insert_index_helper_odd lv (Ghost.reveal i) j;
assert (S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) > 0);
let lvhs = V.index hs lv in
assert (U32.v (V.size_of lvhs) ==
S.length (S.index (RV.as_seq hh0 hs) (U32.v lv)) + 1);
assert (V.size_of lvhs > 1ul);
/// 3) Update the accumulator `acc`.
hash_vec_rv_inv_r_inv hh1 (V.get hh1 hs lv) (V.size_of (V.get hh1 hs lv) - 2ul);
assert (Rgl?.r_inv (hreg hsz) hh1 acc);
hash_fun (V.index lvhs (V.size_of lvhs - 2ul)) acc acc;
let hh2 = HST.get () in
// 3-1) For the `modifies` postcondition
assert (modifies (B.loc_all_regions_from false (B.frameOf acc)) hh1 hh2);
assert (modifies
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh2);
// 3-2) Preservation
RV.rv_inv_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.as_seq_preserved
hs (B.loc_region_only false (B.frameOf acc)) hh1 hh2;
RV.rv_loc_elems_preserved
hs (lv + 1ul) (V.size_of hs)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2;
assert (RV.rv_inv hh2 hs);
assert (Rgl?.r_inv (hreg hsz) hh2 acc);
// 3-3) For `mt_safe_elts`
V.get_preserved hs lv
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // head preserved
mt_safe_elts_preserved
(lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul)
(B.loc_region_only false (B.frameOf acc)) hh1 hh2; // tail preserved
// 3-4) Correctness
insert_snoc_last_helper
(RV.as_seq hh0 (V.get hh0 hs lv))
(Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (Rgl?.r_repr (hreg hsz) hh2 acc) // `nacc` in `MTH.insert_`
((Ghost.reveal hash_spec)
(S.last (S.index (RV.as_seq hh0 hs) (U32.v lv)))
(Rgl?.r_repr (hreg hsz) hh0 acc)));
/// 4) Recursion
insert_ (lv + 1ul)
(Ghost.hide (Ghost.reveal i / 2ul)) (j / 2ul)
hs acc hash_fun;
let hh3 = HST.get () in
// 4-0) Memory safety brought from the postcondition of the recursion
assert (RV.rv_inv hh3 hs);
assert (Rgl?.r_inv (hreg hsz) hh3 acc);
assert (modifies (loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3);
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh3);
// 4-1) For `mt_safe_elts`
rv_inv_rv_elems_reg hh2 hs (lv + 1ul) (V.size_of hs);
RV.rv_loc_elems_included hh2 hs (lv + 1ul) (V.size_of hs);
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)));
assert (loc_disjoint
(V.loc_vector_within hs lv (lv + 1ul))
(B.loc_all_regions_from false (B.frameOf acc)));
V.get_preserved hs lv
(loc_union
(loc_union
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs))
(RV.rv_loc_elems hh2 hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh2 hh3;
assert (V.size_of (V.get hh3 hs lv) ==
j + 1ul - offset_of (Ghost.reveal i)); // head preserved
assert (mt_safe_elts hh3 (lv + 1ul) hs
(Ghost.reveal i / 2ul) (j / 2ul + 1ul)); // tail by recursion
mt_safe_elts_constr hh3 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh3 lv hs (Ghost.reveal i) (j + 1ul));
// 4-2) Correctness
mt_safe_elts_spec hh2 (lv + 1ul) hs (Ghost.reveal i / 2ul) (j / 2ul);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv + 1) (U32.v (Ghost.reveal i) / 2) (U32.v j / 2)
(RV.as_seq hh2 hs) (Rgl?.r_repr (hreg hsz) hh2 acc)));
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_rec #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh3 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))))
else (insert_index_helper_even lv j;
// memory safety
assert (mt_safe_elts hh1 (lv + 1ul) hs (Ghost.reveal i / 2ul) ((j + 1ul) / 2ul));
mt_safe_elts_constr hh1 lv hs (Ghost.reveal i) (j + 1ul);
assert (mt_safe_elts hh1 lv hs (Ghost.reveal i) (j + 1ul));
assert (modifies
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
hh0 hh1);
insert_modifies_union_loc_weakening
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc)))
hh0 hh1;
// correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
MTH.insert_base #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc);
assert (S.equal (RV.as_seq hh1 hs)
(MTH.insert_ #(U32.v hsz) #(Ghost.reveal hash_spec) (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))));
/// 5) Proving the postcondition after recursion
let hh4 = HST.get () in
// 5-1) For the `modifies` postcondition.
assert (modifies
(loc_union
(loc_union
(loc_union
(RV.rs_loc_elem (hvreg hsz) (V.as_seq hh0 hs) (U32.v lv))
(V.loc_vector_within hs lv (lv + 1ul)))
(B.loc_all_regions_from false (B.frameOf acc)))
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs (lv + 1ul) (V.size_of hs))
(V.loc_vector_within hs (lv + 1ul) (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf acc))))
hh0 hh4);
insert_modifies_rec_helper
lv hs (B.loc_all_regions_from false (B.frameOf acc)) hh0;
// 5-2) For `mt_safe_elts`
assert (mt_safe_elts hh4 lv hs (Ghost.reveal i) (j + 1ul));
// 5-3) Preservation
assert (RV.rv_inv hh4 hs);
assert (Rgl?.r_inv (hreg hsz) hh4 acc);
// 5-4) Correctness
mt_safe_elts_spec hh0 lv hs (Ghost.reveal i) j;
assert (S.equal (RV.as_seq hh4 hs)
(MTH.insert_ #(U32.v hsz) #hash_spec (U32.v lv) (U32.v (Ghost.reveal i)) (U32.v j)
(RV.as_seq hh0 hs) (Rgl?.r_repr (hreg hsz) hh0 acc))) // QED
#pop-options
private inline_for_extraction
val mt_insert_pre_nst: mtv:merkle_tree -> v:hash #(MT?.hash_size mtv) -> Tot bool
let mt_insert_pre_nst mtv v = mt_not_full_nst mtv && add64_fits (MT?.offset mtv) ((MT?.j mtv) + 1ul)
val mt_insert_pre: #hsz:Ghost.erased hash_size_t -> mt:const_mt_p -> v:hash #hsz -> HST.ST bool
(requires (fun h0 -> mt_safe h0 (CB.cast mt) /\ (MT?.hash_size (B.get h0 (CB.cast mt) 0)) = Ghost.reveal hsz))
(ensures (fun _ _ _ -> True))
let mt_insert_pre #hsz mt v =
let mt = !*(CB.cast mt) in
assert (MT?.hash_size mt == (MT?.hash_size mt));
mt_insert_pre_nst mt v
// `mt_insert` inserts a hash to a Merkle tree. Note that this operation
// manipulates the content in `v`, since it uses `v` as an accumulator during
// insertion.
#push-options "--z3rlimit 100 --initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
val mt_insert:
hsz:Ghost.erased hash_size_t ->
mt:mt_p -> v:hash #hsz ->
HST.ST unit
(requires (fun h0 ->
let dmt = B.get h0 mt 0 in
mt_safe h0 mt /\
Rgl?.r_inv (hreg hsz) h0 v /\
HH.disjoint (B.frameOf mt) (B.frameOf v) /\
MT?.hash_size dmt = Ghost.reveal hsz /\
mt_insert_pre_nst dmt v))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(mt_loc mt)
(B.loc_all_regions_from false (B.frameOf v)))
h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = Ghost.reveal hsz /\
mt_lift h1 mt == MTH.mt_insert (mt_lift h0 mt) (Rgl?.r_repr (hreg hsz) h0 v)))
#pop-options
#push-options "--z3rlimit 40"
let mt_insert hsz mt v =
let hh0 = HST.get () in
let mtv = !*mt in
let hs = MT?.hs mtv in
let hsz = MT?.hash_size mtv in
insert_ #hsz #(Ghost.reveal (MT?.hash_spec mtv)) 0ul (Ghost.hide (MT?.i mtv)) (MT?.j mtv) hs v (MT?.hash_fun mtv);
let hh1 = HST.get () in
RV.rv_loc_elems_included hh0 (MT?.hs mtv) 0ul (V.size_of hs);
V.loc_vector_within_included hs 0ul (V.size_of hs);
RV.rv_inv_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
RV.as_seq_preserved
(MT?.rhs mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv)
(loc_union
(loc_union
(RV.rv_loc_elems hh0 hs 0ul (V.size_of hs))
(V.loc_vector_within hs 0ul (V.size_of hs)))
(B.loc_all_regions_from false (B.frameOf v)))
hh0 hh1;
mt *= MT (MT?.hash_size mtv)
(MT?.offset mtv)
(MT?.i mtv)
(MT?.j mtv + 1ul)
(MT?.hs mtv)
false // `rhs` is always deprecated right after an insertion.
(MT?.rhs mtv)
(MT?.mroot mtv)
(MT?.hash_spec mtv)
(MT?.hash_fun mtv);
let hh2 = HST.get () in
RV.rv_inv_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.rv_inv_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.hs mtv) (B.loc_buffer mt) hh1 hh2;
RV.as_seq_preserved
(MT?.rhs mtv) (B.loc_buffer mt) hh1 hh2;
Rgl?.r_sep (hreg hsz) (MT?.mroot mtv) (B.loc_buffer mt) hh1 hh2;
mt_safe_elts_preserved
0ul (MT?.hs mtv) (MT?.i mtv) (MT?.j mtv + 1ul) (B.loc_buffer mt)
hh1 hh2
#pop-options
// `mt_create` initiates a Merkle tree with a given initial hash `init`.
// A valid Merkle tree should contain at least one element.
val mt_create_custom:
hsz:hash_size_t ->
hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
r:HST.erid -> init:hash #hsz -> hash_fun:hash_fun_t #hsz #hash_spec -> HST.ST mt_p
(requires (fun h0 ->
Rgl?.r_inv (hreg hsz) h0 init /\
HH.disjoint r (B.frameOf init)))
(ensures (fun h0 mt h1 ->
// memory safety
modifies (loc_union (mt_loc mt) (B.loc_all_regions_from false (B.frameOf init))) h0 h1 /\
mt_safe h1 mt /\
// correctness
MT?.hash_size (B.get h1 mt 0) = hsz /\
mt_lift h1 mt == MTH.mt_create (U32.v hsz) (Ghost.reveal hash_spec) (Rgl?.r_repr (hreg hsz) h0 init)))
#push-options "--z3rlimit 40"
let mt_create_custom hsz hash_spec r init hash_fun =
let hh0 = HST.get () in
let mt = create_empty_mt hsz hash_spec hash_fun r in
mt_insert hsz mt init;
let hh2 = HST.get () in
mt
#pop-options
/// Construction and Destruction of paths
// Since each element pointer in `path` is from the target Merkle tree and
// each element has different location in `MT?.hs` (thus different region id),
// we cannot use the regionality property for `path`s. Hence here we manually
// define invariants and representation.
noeq type path =
| Path: hash_size:hash_size_t ->
hashes:V.vector (hash #hash_size) ->
path
type path_p = B.pointer path
type const_path_p = const_pointer path
private
let phashes (h:HS.mem) (p:path_p)
: GTot (V.vector (hash #(Path?.hash_size (B.get h p 0))))
= Path?.hashes (B.get h p 0)
// Memory safety of a path as an invariant
inline_for_extraction noextract
val path_safe:
h:HS.mem -> mtr:HH.rid -> p:path_p -> GTot Type0
let path_safe h mtr p =
B.live h p /\ B.freeable p /\
V.live h (phashes h p) /\ V.freeable (phashes h p) /\
HST.is_eternal_region (V.frameOf (phashes h p)) /\
(let hsz = Path?.hash_size (B.get h p 0) in
V.forall_all h (phashes h p)
(fun hp -> Rgl?.r_inv (hreg hsz) h hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
HH.extends (V.frameOf (phashes h p)) (B.frameOf p) /\
HH.disjoint mtr (B.frameOf p))
val path_loc: path_p -> GTot loc
let path_loc p = B.loc_all_regions_from false (B.frameOf p)
val lift_path_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat ->
j:nat{
i <= j /\ j <= S.length hs /\
V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = j - i}) (decreases j)
let rec lift_path_ #hsz h hs i j =
if i = j then S.empty
else (S.snoc (lift_path_ h hs i (j - 1))
(Rgl?.r_repr (hreg hsz) h (S.index hs (j - 1))))
// Representation of a path
val lift_path:
#hsz:hash_size_t ->
h:HS.mem -> mtr:HH.rid -> p:path_p {path_safe h mtr p /\ (Path?.hash_size (B.get h p 0)) = hsz} ->
GTot (hp:MTH.path #(U32.v hsz) {S.length hp = U32.v (V.size_of (phashes h p))})
let lift_path #hsz h mtr p =
lift_path_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p)))
val lift_path_index_:
#hsz:hash_size_t ->
h:HS.mem ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
k:nat{i <= k && k < j} ->
Lemma (requires (V.forall_seq hs i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (Rgl?.r_repr (hreg hsz) h (S.index hs k) ==
S.index (lift_path_ h hs i j) (k - i)))
(decreases j)
[SMTPat (S.index (lift_path_ h hs i j) (k - i))]
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec lift_path_index_ #hsz h hs i j k =
if i = j then ()
else if k = j - 1 then ()
else lift_path_index_ #hsz h hs i (j - 1) k
#pop-options
val lift_path_index:
h:HS.mem -> mtr:HH.rid ->
p:path_p -> i:uint32_t ->
Lemma (requires (path_safe h mtr p /\
i < V.size_of (phashes h p)))
(ensures (let hsz = Path?.hash_size (B.get h p 0) in
Rgl?.r_repr (hreg hsz) h (V.get h (phashes h p) i) ==
S.index (lift_path #(hsz) h mtr p) (U32.v i)))
let lift_path_index h mtr p i =
lift_path_index_ h (V.as_seq h (phashes h p))
0 (S.length (V.as_seq h (phashes h p))) (U32.v i)
val lift_path_eq:
#hsz:hash_size_t ->
h:HS.mem ->
hs1:S.seq (hash #hsz) -> hs2:S.seq (hash #hsz) ->
i:nat -> j:nat ->
Lemma (requires (i <= j /\ j <= S.length hs1 /\ j <= S.length hs2 /\
S.equal (S.slice hs1 i j) (S.slice hs2 i j) /\
V.forall_seq hs1 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp) /\
V.forall_seq hs2 i j (fun hp -> Rgl?.r_inv (hreg hsz) h hp)))
(ensures (S.equal (lift_path_ h hs1 i j) (lift_path_ h hs2 i j)))
let lift_path_eq #hsz h hs1 hs2 i j =
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs1 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 k));
assert (forall (k:nat{i <= k && k < j}).
S.index (lift_path_ h hs2 i j) (k - i) ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 k));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs1 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs1 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (lift_path_ h hs2 i j) k ==
Rgl?.r_repr (hreg hsz) h (S.index hs2 (k + i)));
assert (forall (k:nat{k < j - i}).
S.index (S.slice hs1 i j) k == S.index (S.slice hs2 i j) k);
assert (forall (k:nat{i <= k && k < j}).
S.index (S.slice hs1 i j) (k - i) == S.index (S.slice hs2 i j) (k - i))
private
val path_safe_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid -> hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma
(requires (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (V.forall_seq hs i j
(fun hp ->
Rgl?.r_inv (hreg hsz) h1 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp))))
(decreases j)
let rec path_safe_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1;
path_safe_preserved_ mtr hs i (j - 1) dl h0 h1)
val path_safe_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p))
let path_safe_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_safe_preserved_
mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p))) dl h0 h1
val path_safe_init_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
V.size_of (phashes h0 p) = 0ul /\
B.loc_disjoint dl (path_loc p) /\
modifies dl h0 h1))
(ensures (path_safe h1 mtr p /\
V.size_of (phashes h1 p) = 0ul))
let path_safe_init_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)))
val path_preserved_:
#hsz:hash_size_t ->
mtr:HH.rid ->
hs:S.seq (hash #hsz) ->
i:nat -> j:nat{i <= j && j <= S.length hs} ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (V.forall_seq hs i j
(fun hp -> Rgl?.r_inv (hreg hsz) h0 hp /\
HH.includes mtr (Rgl?.region_of (hreg hsz) hp)) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved_ mtr hs i j dl h0 h1;
S.equal (lift_path_ h0 hs i j)
(lift_path_ h1 hs i j)))
(decreases j)
#push-options "--initial_fuel 1 --max_fuel 1 --initial_ifuel 1 --max_ifuel 1"
let rec path_preserved_ #hsz mtr hs i j dl h0 h1 =
if i = j then ()
else (path_safe_preserved_ mtr hs i (j - 1) dl h0 h1;
path_preserved_ mtr hs i (j - 1) dl h0 h1;
assert (loc_includes
(B.loc_all_regions_from false mtr)
(B.loc_all_regions_from false
(Rgl?.region_of (hreg hsz) (S.index hs (j - 1)))));
Rgl?.r_sep (hreg hsz) (S.index hs (j - 1)) dl h0 h1)
#pop-options
val path_preserved:
mtr:HH.rid -> p:path_p ->
dl:loc -> h0:HS.mem -> h1:HS.mem ->
Lemma (requires (path_safe h0 mtr p /\
loc_disjoint dl (path_loc p) /\
loc_disjoint dl (B.loc_all_regions_from false mtr) /\
modifies dl h0 h1))
(ensures (path_safe_preserved mtr p dl h0 h1;
let hsz0 = (Path?.hash_size (B.get h0 p 0)) in
let hsz1 = (Path?.hash_size (B.get h1 p 0)) in
let b:MTH.path = lift_path #hsz0 h0 mtr p in
let a:MTH.path = lift_path #hsz1 h1 mtr p in
hsz0 = hsz1 /\ S.equal b a))
let path_preserved mtr p dl h0 h1 =
assert (loc_includes (path_loc p) (B.loc_buffer p));
assert (loc_includes (path_loc p) (V.loc_vector (phashes h0 p)));
path_preserved_ mtr (V.as_seq h0 (phashes h0 p))
0 (S.length (V.as_seq h0 (phashes h0 p)))
dl h0 h1
val init_path:
hsz:hash_size_t ->
mtr:HH.rid -> r:HST.erid ->
HST.ST path_p
(requires (fun h0 -> HH.disjoint mtr r))
(ensures (fun h0 p h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
Path?.hash_size (B.get h1 p 0) = hsz /\
S.equal (lift_path #hsz h1 mtr p) S.empty))
let init_path hsz mtr r =
let nrid = HST.new_region r in
(B.malloc r (Path hsz (rg_alloc (hvreg hsz) nrid)) 1ul)
val clear_path:
mtr:HH.rid -> p:path_p ->
HST.ST unit
(requires (fun h0 -> path_safe h0 mtr p))
(ensures (fun h0 _ h1 ->
// memory safety
path_safe h1 mtr p /\
// correctness
V.size_of (phashes h1 p) = 0ul /\
S.equal (lift_path #(Path?.hash_size (B.get h1 p 0)) h1 mtr p) S.empty))
let clear_path mtr p =
let pv = !*p in
p *= Path (Path?.hash_size pv) (V.clear (Path?.hashes pv))
val free_path:
p:path_p ->
HST.ST unit
(requires (fun h0 ->
B.live h0 p /\ B.freeable p /\
V.live h0 (phashes h0 p) /\ V.freeable (phashes h0 p) /\
HH.extends (V.frameOf (phashes h0 p)) (B.frameOf p)))
(ensures (fun h0 _ h1 ->
modifies (path_loc p) h0 h1))
let free_path p =
let pv = !*p in
V.free (Path?.hashes pv);
B.free p
/// Getting the Merkle root and path
// Construct "rightmost hashes" for a given (incomplete) Merkle tree.
// This function calculates the Merkle root as well, which is the final
// accumulator value.
private
val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j)) | false | false | MerkleTree.Low.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 1,
"initial_ifuel": 1,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 250,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val construct_rhs:
#hsz:hash_size_t ->
#hash_spec:Ghost.erased (MTS.hash_fun_t #(U32.v hsz)) ->
lv:uint32_t{lv <= merkle_tree_size_lg} ->
hs:hash_vv hsz {V.size_of hs = merkle_tree_size_lg} ->
rhs:hash_vec #hsz {V.size_of rhs = merkle_tree_size_lg} ->
i:index_t ->
j:index_t{i <= j && (U32.v j) < pow2 (32 - U32.v lv)} ->
acc:hash #hsz ->
actd:bool ->
hash_fun:hash_fun_t #hsz #(Ghost.reveal hash_spec) ->
HST.ST unit
(requires (fun h0 ->
RV.rv_inv h0 hs /\ RV.rv_inv h0 rhs /\
HH.disjoint (V.frameOf hs) (V.frameOf rhs) /\
Rgl?.r_inv (hreg hsz) h0 acc /\
HH.disjoint (B.frameOf acc) (V.frameOf hs) /\
HH.disjoint (B.frameOf acc) (V.frameOf rhs) /\
mt_safe_elts #hsz h0 lv hs i j))
(ensures (fun h0 _ h1 ->
// memory safety
modifies (loc_union
(RV.loc_rvector rhs)
(B.loc_all_regions_from false (B.frameOf acc)))
h0 h1 /\
RV.rv_inv h1 rhs /\
Rgl?.r_inv (hreg hsz) h1 acc /\
// correctness
(mt_safe_elts_spec #hsz h0 lv hs i j;
MTH.construct_rhs #(U32.v hsz) #(Ghost.reveal hash_spec)
(U32.v lv)
(Rgl?.r_repr (hvvreg hsz) h0 hs)
(Rgl?.r_repr (hvreg hsz) h0 rhs)
(U32.v i) (U32.v j)
(Rgl?.r_repr (hreg hsz) h0 acc) actd ==
(Rgl?.r_repr (hvreg hsz) h1 rhs, Rgl?.r_repr (hreg hsz) h1 acc)
)))
(decreases (U32.v j)) | [
"recursion"
] | MerkleTree.Low.construct_rhs | {
"file_name": "src/MerkleTree.Low.fst",
"git_rev": "3b0f086655c145aa23f58a97259ebf4cf112a4a3",
"git_url": "https://github.com/hacl-star/merkle-tree.git",
"project_name": "merkle-tree"
} |
lv: LowStar.Vector.uint32_t{lv <= MerkleTree.Low.merkle_tree_size_lg} ->
hs:
MerkleTree.Low.Datastructures.hash_vv hsz
{LowStar.Vector.size_of hs = MerkleTree.Low.merkle_tree_size_lg} ->
rhs:
MerkleTree.Low.Datastructures.hash_vec
{LowStar.Vector.size_of rhs = MerkleTree.Low.merkle_tree_size_lg} ->
i: MerkleTree.Low.index_t ->
j: MerkleTree.Low.index_t{i <= j && FStar.UInt32.v j < Prims.pow2 (32 - FStar.UInt32.v lv)} ->
acc: MerkleTree.Low.Datastructures.hash ->
actd: Prims.bool ->
hash_fun: MerkleTree.Low.Hashfunctions.hash_fun_t
-> FStar.HyperStack.ST.ST Prims.unit | {
"end_col": 5,
"end_line": 1509,
"start_col": 71,
"start_line": 1354
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let u1024 = b:B.buffer UInt64.t{B.length b == 16} | let u1024 = | false | null | false | b: B.buffer UInt64.t {B.length b == 16} | {
"checked_file": "Vale.Inline.X64.Fmul_inline.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Inline.X64.Fmul_inline.fsti"
} | [
"total"
] | [
"LowStar.Buffer.buffer",
"FStar.UInt64.t",
"Prims.eq2",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"LowStar.Buffer.trivial_preorder"
] | [] | module Vale.Inline.X64.Fmul_inline
open Vale.X64.CPU_Features_s
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open Vale.Curve25519.Fast_defs
open FStar.Mul
unfold
let u256 = b:B.buffer UInt64.t{B.length b == 4}
unfold
let u512 = b:B.buffer UInt64.t{B.length b == 8} | false | true | Vale.Inline.X64.Fmul_inline.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val u1024 : Type0 | [] | Vale.Inline.X64.Fmul_inline.u1024 | {
"file_name": "vale/code/arch/x64/interop/Vale.Inline.X64.Fmul_inline.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 49,
"end_line": 15,
"start_col": 12,
"start_line": 15
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let u256 = b:B.buffer UInt64.t{B.length b == 4} | let u256 = | false | null | false | b: B.buffer UInt64.t {B.length b == 4} | {
"checked_file": "Vale.Inline.X64.Fmul_inline.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Inline.X64.Fmul_inline.fsti"
} | [
"total"
] | [
"LowStar.Buffer.buffer",
"FStar.UInt64.t",
"Prims.eq2",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"LowStar.Buffer.trivial_preorder"
] | [] | module Vale.Inline.X64.Fmul_inline
open Vale.X64.CPU_Features_s
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open Vale.Curve25519.Fast_defs
open FStar.Mul | false | true | Vale.Inline.X64.Fmul_inline.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val u256 : Type0 | [] | Vale.Inline.X64.Fmul_inline.u256 | {
"file_name": "vale/code/arch/x64/interop/Vale.Inline.X64.Fmul_inline.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 47,
"end_line": 11,
"start_col": 11,
"start_line": 11
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let u512 = b:B.buffer UInt64.t{B.length b == 8} | let u512 = | false | null | false | b: B.buffer UInt64.t {B.length b == 8} | {
"checked_file": "Vale.Inline.X64.Fmul_inline.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Inline.X64.Fmul_inline.fsti"
} | [
"total"
] | [
"LowStar.Buffer.buffer",
"FStar.UInt64.t",
"Prims.eq2",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"LowStar.Buffer.trivial_preorder"
] | [] | module Vale.Inline.X64.Fmul_inline
open Vale.X64.CPU_Features_s
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open Vale.Curve25519.Fast_defs
open FStar.Mul
unfold
let u256 = b:B.buffer UInt64.t{B.length b == 4} | false | true | Vale.Inline.X64.Fmul_inline.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val u512 : Type0 | [] | Vale.Inline.X64.Fmul_inline.u512 | {
"file_name": "vale/code/arch/x64/interop/Vale.Inline.X64.Fmul_inline.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 47,
"end_line": 13,
"start_col": 11,
"start_line": 13
} |
|
Prims.GTot | val as_nat (b: B.buffer UInt64.t {B.length b == 4}) (h: HS.mem) : GTot nat | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Curve25519.Fast_defs",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack",
"short_module": "HS"
},
{
"abbrev": true,
"full_module": "LowStar.Buffer",
"short_module": "B"
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.ST",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Inline.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let as_nat (b:B.buffer UInt64.t{B.length b == 4}) (h:HS.mem) : GTot nat =
let s = B.as_seq h b in
let s0 = UInt64.v (Seq.index s 0) in
let s1 = UInt64.v (Seq.index s 1) in
let s2 = UInt64.v (Seq.index s 2) in
let s3 = UInt64.v (Seq.index s 3) in
pow2_four s0 s1 s2 s3 | val as_nat (b: B.buffer UInt64.t {B.length b == 4}) (h: HS.mem) : GTot nat
let as_nat (b: B.buffer UInt64.t {B.length b == 4}) (h: HS.mem) : GTot nat = | false | null | false | let s = B.as_seq h b in
let s0 = UInt64.v (Seq.index s 0) in
let s1 = UInt64.v (Seq.index s 1) in
let s2 = UInt64.v (Seq.index s 2) in
let s3 = UInt64.v (Seq.index s 3) in
pow2_four s0 s1 s2 s3 | {
"checked_file": "Vale.Inline.X64.Fmul_inline.fsti.checked",
"dependencies": [
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Curve25519.Fast_defs.fst.checked",
"prims.fst.checked",
"LowStar.Buffer.fst.checked",
"FStar.UInt64.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.fst.checked"
],
"interface_file": false,
"source_file": "Vale.Inline.X64.Fmul_inline.fsti"
} | [
"sometrivial"
] | [
"LowStar.Buffer.buffer",
"FStar.UInt64.t",
"Prims.eq2",
"Prims.int",
"LowStar.Monotonic.Buffer.length",
"LowStar.Buffer.trivial_preorder",
"FStar.Monotonic.HyperStack.mem",
"Vale.Curve25519.Fast_defs.pow2_four",
"FStar.UInt.uint_t",
"FStar.UInt64.v",
"FStar.Seq.Base.index",
"FStar.Seq.Base.seq",
"LowStar.Monotonic.Buffer.as_seq",
"Prims.nat"
] | [] | module Vale.Inline.X64.Fmul_inline
open Vale.X64.CPU_Features_s
open FStar.HyperStack.ST
module B = LowStar.Buffer
module HS = FStar.HyperStack
open Vale.Curve25519.Fast_defs
open FStar.Mul
unfold
let u256 = b:B.buffer UInt64.t{B.length b == 4}
unfold
let u512 = b:B.buffer UInt64.t{B.length b == 8}
unfold
let u1024 = b:B.buffer UInt64.t{B.length b == 16} | false | false | Vale.Inline.X64.Fmul_inline.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val as_nat (b: B.buffer UInt64.t {B.length b == 4}) (h: HS.mem) : GTot nat | [] | Vale.Inline.X64.Fmul_inline.as_nat | {
"file_name": "vale/code/arch/x64/interop/Vale.Inline.X64.Fmul_inline.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
b: LowStar.Buffer.buffer FStar.UInt64.t {LowStar.Monotonic.Buffer.length b == 4} ->
h: FStar.Monotonic.HyperStack.mem
-> Prims.GTot Prims.nat | {
"end_col": 23,
"end_line": 23,
"start_col": 73,
"start_line": 17
} |
FStar.Pervasives.Lemma | val lemma_nat_from_bytes_le_append (k1 k2: bytes)
: Lemma (requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures
nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) =
let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` Seq.slice k 0 (Seq.length k1));
assert (k2 `Seq.equal` Seq.slice k (Seq.length k1) n) | val lemma_nat_from_bytes_le_append (k1 k2: bytes)
: Lemma (requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures
nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2)
let lemma_nat_from_bytes_le_append (k1 k2: bytes)
: Lemma (requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures
nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) = | false | null | true | let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` (Seq.slice k 0 (Seq.length k1)));
assert (k2 `Seq.equal` (Seq.slice k (Seq.length k1) n)) | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [
"lemma"
] | [
"Lib.ByteSequence.bytes",
"Prims._assert",
"FStar.Seq.Base.equal",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"FStar.Seq.Base.slice",
"FStar.Seq.Base.length",
"Prims.unit",
"Lib.ByteSequence.nat_from_intseq_le_slice_lemma",
"Prims.int",
"Prims.op_Addition",
"FStar.Seq.Base.seq",
"Lib.IntTypes.int_t",
"FStar.Seq.Base.append",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Lib.IntTypes.max_size_t",
"Prims.squash",
"Prims.eq2",
"Lib.ByteSequence.nat_from_bytes_le",
"FStar.Mul.op_Star",
"Prims.pow2",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_nat_from_bytes_le_append (k1 k2: bytes)
: Lemma (requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures
nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) | [] | Hacl.Impl.Load56.lemma_nat_from_bytes_le_append | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | k1: Lib.ByteSequence.bytes -> k2: Lib.ByteSequence.bytes
-> FStar.Pervasives.Lemma
(requires FStar.Seq.Base.length k1 + FStar.Seq.Base.length k2 <= Lib.IntTypes.max_size_t)
(ensures
Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.append k1 k2) ==
Lib.ByteSequence.nat_from_bytes_le k1 +
Prims.pow2 (FStar.Seq.Base.length k1 * 8) * Lib.ByteSequence.nat_from_bytes_le k2) | {
"end_col": 55,
"end_line": 70,
"start_col": 75,
"start_line": 65
} |
FStar.HyperStack.ST.Stack | val load_32_bytes:
out:lbuffer uint64 5ul
-> b:lbuffer uint8 32ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_fits h1 out (1, 1, 1, 1, 1)
) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let load_32_bytes out b =
let h0 = ST.get() in
let b0 = hload56_le' b 0ul in
let b1 = hload56_le' b 7ul in
let b2 = hload56_le' b 14ul in
let b3 = hload56_le' b 21ul in
let b4 = uint_from_bytes_le #U32 (sub b 28ul 4ul) in
let b4 = to_u64 b4 in
lemma_reveal_uint_to_bytes_le #U32 (as_seq h0 (gsub b 28ul 4ul));
lemma_load_32_bytes (as_seq h0 b) b0 b1 b2 b3 b4;
Hacl.Bignum25519.make_u64_5 out b0 b1 b2 b3 b4 | val load_32_bytes:
out:lbuffer uint64 5ul
-> b:lbuffer uint8 32ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_fits h1 out (1, 1, 1, 1, 1)
)
let load_32_bytes out b = | true | null | false | let h0 = ST.get () in
let b0 = hload56_le' b 0ul in
let b1 = hload56_le' b 7ul in
let b2 = hload56_le' b 14ul in
let b3 = hload56_le' b 21ul in
let b4 = uint_from_bytes_le #U32 (sub b 28ul 4ul) in
let b4 = to_u64 b4 in
lemma_reveal_uint_to_bytes_le #U32 (as_seq h0 (gsub b 28ul 4ul));
lemma_load_32_bytes (as_seq h0 b) b0 b1 b2 b3 b4;
Hacl.Bignum25519.make_u64_5 out b0 b1 b2 b3 b4 | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint64",
"FStar.UInt32.__uint_to_t",
"Lib.IntTypes.uint8",
"Hacl.Bignum25519.make_u64_5",
"Prims.unit",
"Hacl.Impl.Load56.lemma_load_32_bytes",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Lib.ByteSequence.lemma_reveal_uint_to_bytes_le",
"Lib.IntTypes.U32",
"Lib.IntTypes.SEC",
"Lib.Buffer.gsub",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.to_u64",
"Lib.ByteBuffer.uint_from_bytes_le",
"Lib.IntTypes.uint_t",
"Lib.Buffer.lbuffer_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.mk_int",
"Lib.IntTypes.PUB",
"Lib.Buffer.sub",
"Hacl.Impl.Load56.hload56_le'",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) =
let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` Seq.slice k 0 (Seq.length k1));
assert (k2 `Seq.equal` Seq.slice k (Seq.length k1) n)
#push-options "--z3rlimit 100"
let lemma_load_64_bytes (k:lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\
v b9 == v (Seq.index k 63)
)
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 35);
lemma_nat_from_bytes_le_append (Seq.slice k 0 35) (Seq.slice k 35 42);
lemma_nat_from_bytes_le_append (Seq.slice k 0 42) (Seq.slice k 42 49);
lemma_nat_from_bytes_le_append (Seq.slice k 0 49) (Seq.slice k 49 56);
lemma_nat_from_bytes_le_append (Seq.slice k 0 56) (Seq.slice k 56 63);
lemma_nat_from_bytes_le_append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63));
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 35) `Seq.equal` Seq.slice k 0 35);
assert (Seq.append (Seq.slice k 0 35) (Seq.slice k 35 42) `Seq.equal` Seq.slice k 0 42);
assert (Seq.append (Seq.slice k 0 42) (Seq.slice k 42 49) `Seq.equal` Seq.slice k 0 49);
assert (Seq.append (Seq.slice k 0 49) (Seq.slice k 49 56) `Seq.equal` Seq.slice k 0 56);
assert (Seq.append (Seq.slice k 0 56) (Seq.slice k 56 63) `Seq.equal` Seq.slice k 0 63);
assert (Seq.append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63)) `Seq.equal` k);
nat_from_intseq_le_lemma0 (Seq.create 1 (Seq.index k 63));
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 280 == 0x10000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 336 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 392 == 0x100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 448 == 0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 504 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000)
#pop-options
val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
)
[@CInline]
let load_64_bytes out b =
let h0 = ST.get() in
let b0 = hload56_le b 0ul in
let b1 = hload56_le b 7ul in
let b2 = hload56_le b 14ul in
let b3 = hload56_le b 21ul in
let b4 = hload56_le b 28ul in
let b5 = hload56_le b 35ul in
let b6 = hload56_le b 42ul in
let b7 = hload56_le b 49ul in
let b8 = hload56_le b 56ul in
let b63 = b.(63ul) in
let b9 = to_u64 b63 in
lemma_load_64_bytes (as_seq h0 b) b0 b1 b2 b3 b4 b5 b6 b7 b8 b9;
Hacl.Bignum25519.make_u64_10 out b0 b1 b2 b3 b4 b5 b6 b7 b8 b9
inline_for_extraction noextract
val hload56_le':
b:lbuffer uint8 32ul
-> off:size_t{v off <= 21} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le' b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_load_32_bytes (k:lbytes 32) (b0 b1 b2 b3 b4:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 32);
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 32) `Seq.equal` k);
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000)
val load_32_bytes:
out:lbuffer uint64 5ul
-> b:lbuffer uint8 32ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_fits h1 out (1, 1, 1, 1, 1)
) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val load_32_bytes:
out:lbuffer uint64 5ul
-> b:lbuffer uint8 32ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_fits h1 out (1, 1, 1, 1, 1)
) | [] | Hacl.Impl.Load56.load_32_bytes | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | out: Lib.Buffer.lbuffer Lib.IntTypes.uint64 5ul -> b: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 48,
"end_line": 235,
"start_col": 25,
"start_line": 225
} |
FStar.HyperStack.ST.Stack | val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let load_64_bytes out b =
let h0 = ST.get() in
let b0 = hload56_le b 0ul in
let b1 = hload56_le b 7ul in
let b2 = hload56_le b 14ul in
let b3 = hload56_le b 21ul in
let b4 = hload56_le b 28ul in
let b5 = hload56_le b 35ul in
let b6 = hload56_le b 42ul in
let b7 = hload56_le b 49ul in
let b8 = hload56_le b 56ul in
let b63 = b.(63ul) in
let b9 = to_u64 b63 in
lemma_load_64_bytes (as_seq h0 b) b0 b1 b2 b3 b4 b5 b6 b7 b8 b9;
Hacl.Bignum25519.make_u64_10 out b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 | val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
)
let load_64_bytes out b = | true | null | false | let h0 = ST.get () in
let b0 = hload56_le b 0ul in
let b1 = hload56_le b 7ul in
let b2 = hload56_le b 14ul in
let b3 = hload56_le b 21ul in
let b4 = hload56_le b 28ul in
let b5 = hload56_le b 35ul in
let b6 = hload56_le b 42ul in
let b7 = hload56_le b 49ul in
let b8 = hload56_le b 56ul in
let b63 = b.(63ul) in
let b9 = to_u64 b63 in
lemma_load_64_bytes (as_seq h0 b) b0 b1 b2 b3 b4 b5 b6 b7 b8 b9;
Hacl.Bignum25519.make_u64_10 out b0 b1 b2 b3 b4 b5 b6 b7 b8 b9 | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint64",
"FStar.UInt32.__uint_to_t",
"Lib.IntTypes.uint8",
"Hacl.Bignum25519.make_u64_10",
"Prims.unit",
"Hacl.Impl.Load56.lemma_load_64_bytes",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Lib.IntTypes.int_t",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"Lib.IntTypes.to_u64",
"Lib.IntTypes.U8",
"Lib.Buffer.op_Array_Access",
"Hacl.Impl.Load56.hload56_le",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) =
let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` Seq.slice k 0 (Seq.length k1));
assert (k2 `Seq.equal` Seq.slice k (Seq.length k1) n)
#push-options "--z3rlimit 100"
let lemma_load_64_bytes (k:lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\
v b9 == v (Seq.index k 63)
)
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 35);
lemma_nat_from_bytes_le_append (Seq.slice k 0 35) (Seq.slice k 35 42);
lemma_nat_from_bytes_le_append (Seq.slice k 0 42) (Seq.slice k 42 49);
lemma_nat_from_bytes_le_append (Seq.slice k 0 49) (Seq.slice k 49 56);
lemma_nat_from_bytes_le_append (Seq.slice k 0 56) (Seq.slice k 56 63);
lemma_nat_from_bytes_le_append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63));
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 35) `Seq.equal` Seq.slice k 0 35);
assert (Seq.append (Seq.slice k 0 35) (Seq.slice k 35 42) `Seq.equal` Seq.slice k 0 42);
assert (Seq.append (Seq.slice k 0 42) (Seq.slice k 42 49) `Seq.equal` Seq.slice k 0 49);
assert (Seq.append (Seq.slice k 0 49) (Seq.slice k 49 56) `Seq.equal` Seq.slice k 0 56);
assert (Seq.append (Seq.slice k 0 56) (Seq.slice k 56 63) `Seq.equal` Seq.slice k 0 63);
assert (Seq.append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63)) `Seq.equal` k);
nat_from_intseq_le_lemma0 (Seq.create 1 (Seq.index k 63));
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 280 == 0x10000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 336 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 392 == 0x100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 448 == 0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 504 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000)
#pop-options
val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
) | [] | Hacl.Impl.Load56.load_64_bytes | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | out: Lib.Buffer.lbuffer Lib.IntTypes.uint64 10ul -> b: Lib.Buffer.lbuffer Lib.IntTypes.uint8 64ul
-> FStar.HyperStack.ST.Stack Prims.unit | {
"end_col": 64,
"end_line": 145,
"start_col": 25,
"start_line": 131
} |
FStar.HyperStack.ST.Stack | val hload56_le':
b:lbuffer uint8 32ul
-> off:size_t{v off <= 21} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let hload56_le' b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z' | val hload56_le':
b:lbuffer uint8 32ul
-> off:size_t{v off <= 21} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le' b off = | true | null | false | let h0 = ST.get () in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56);
calc ( == ) {
v z' <: nat;
( == ) { () }
v (z &. u64 0xffffffffffffff);
( == ) { logand_spec z (u64 0xffffffffffffff) }
(v z) `logand_v` 0xffffffffffffff;
( == ) { (assert_norm (pow2 56 - 1 == 0xffffffffffffff);
UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56) }
(v z % pow2 56);
( == ) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
( == ) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) %
pow2 56;
( == ) { (FStar.Math.Lemmas.lemma_mod_plus_distr_r (nat_from_bytes_le (Seq.slice (as_seq h0 b8)
0
7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56)) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal (Seq.slice (as_seq h0 b) (v off) (v off + 7)) (Seq.slice (as_seq h0 b8) 0 7));
z' | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Prims.op_Addition",
"FStar.Calc.calc_finish",
"Prims.nat",
"Prims.eq2",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"Lib.ByteSequence.nat_from_bytes_le",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"FStar.Calc.calc_step",
"Prims.op_Modulus",
"FStar.Mul.op_Star",
"Prims.pow2",
"Lib.IntTypes.logand_v",
"Lib.IntTypes.op_Amp_Dot",
"Lib.IntTypes.u64",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"Lib.IntTypes.logand_spec",
"FStar.UInt.logand_mask",
"FStar.UInt.to_uint_t",
"FStar.Pervasives.assert_norm",
"Prims.int",
"Prims.op_Subtraction",
"Lib.ByteSequence.lemma_reveal_uint_to_bytes_le",
"Lib.ByteSequence.nat_from_intseq_le_slice_lemma",
"Lib.IntTypes.U8",
"FStar.Math.Lemmas.cancel_mul_mod",
"FStar.Math.Lemmas.swap_mul",
"FStar.Math.Lemmas.lemma_mod_plus_distr_r",
"Lib.IntTypes.int_t",
"Lib.IntTypes.uint64",
"Lib.ByteBuffer.uint_from_bytes_le",
"Lib.IntTypes.uint_t",
"Lib.Buffer.lbuffer_t",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.sub",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) =
let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` Seq.slice k 0 (Seq.length k1));
assert (k2 `Seq.equal` Seq.slice k (Seq.length k1) n)
#push-options "--z3rlimit 100"
let lemma_load_64_bytes (k:lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\
v b9 == v (Seq.index k 63)
)
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 35);
lemma_nat_from_bytes_le_append (Seq.slice k 0 35) (Seq.slice k 35 42);
lemma_nat_from_bytes_le_append (Seq.slice k 0 42) (Seq.slice k 42 49);
lemma_nat_from_bytes_le_append (Seq.slice k 0 49) (Seq.slice k 49 56);
lemma_nat_from_bytes_le_append (Seq.slice k 0 56) (Seq.slice k 56 63);
lemma_nat_from_bytes_le_append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63));
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 35) `Seq.equal` Seq.slice k 0 35);
assert (Seq.append (Seq.slice k 0 35) (Seq.slice k 35 42) `Seq.equal` Seq.slice k 0 42);
assert (Seq.append (Seq.slice k 0 42) (Seq.slice k 42 49) `Seq.equal` Seq.slice k 0 49);
assert (Seq.append (Seq.slice k 0 49) (Seq.slice k 49 56) `Seq.equal` Seq.slice k 0 56);
assert (Seq.append (Seq.slice k 0 56) (Seq.slice k 56 63) `Seq.equal` Seq.slice k 0 63);
assert (Seq.append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63)) `Seq.equal` k);
nat_from_intseq_le_lemma0 (Seq.create 1 (Seq.index k 63));
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 280 == 0x10000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 336 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 392 == 0x100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 448 == 0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 504 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000)
#pop-options
val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
)
[@CInline]
let load_64_bytes out b =
let h0 = ST.get() in
let b0 = hload56_le b 0ul in
let b1 = hload56_le b 7ul in
let b2 = hload56_le b 14ul in
let b3 = hload56_le b 21ul in
let b4 = hload56_le b 28ul in
let b5 = hload56_le b 35ul in
let b6 = hload56_le b 42ul in
let b7 = hload56_le b 49ul in
let b8 = hload56_le b 56ul in
let b63 = b.(63ul) in
let b9 = to_u64 b63 in
lemma_load_64_bytes (as_seq h0 b) b0 b1 b2 b3 b4 b5 b6 b7 b8 b9;
Hacl.Bignum25519.make_u64_10 out b0 b1 b2 b3 b4 b5 b6 b7 b8 b9
inline_for_extraction noextract
val hload56_le':
b:lbuffer uint8 32ul
-> off:size_t{v off <= 21} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val hload56_le':
b:lbuffer uint8 32ul
-> off:size_t{v off <= 21} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
) | [] | Hacl.Impl.Load56.hload56_le' | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Lib.Buffer.lbuffer Lib.IntTypes.uint8 32ul -> off: Lib.IntTypes.size_t{Lib.IntTypes.v off <= 21}
-> FStar.HyperStack.ST.Stack Lib.IntTypes.uint64 | {
"end_col": 4,
"end_line": 190,
"start_col": 23,
"start_line": 159
} |
FStar.HyperStack.ST.Stack | val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z' | val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off = | true | null | false | let h0 = ST.get () in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56);
calc ( == ) {
v z' <: nat;
( == ) { () }
v (z &. u64 0xffffffffffffff);
( == ) { logand_spec z (u64 0xffffffffffffff) }
(v z) `logand_v` 0xffffffffffffff;
( == ) { (assert_norm (pow2 56 - 1 == 0xffffffffffffff);
UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56) }
(v z % pow2 56);
( == ) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
( == ) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) %
pow2 56;
( == ) { (FStar.Math.Lemmas.lemma_mod_plus_distr_r (nat_from_bytes_le (Seq.slice (as_seq h0 b8)
0
7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56)) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal (Seq.slice (as_seq h0 b) (v off) (v off + 7)) (Seq.slice (as_seq h0 b8) 0 7));
z' | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [] | [
"Lib.Buffer.lbuffer",
"Lib.IntTypes.uint8",
"FStar.UInt32.__uint_to_t",
"Lib.IntTypes.size_t",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"Lib.IntTypes.v",
"Lib.IntTypes.U32",
"Lib.IntTypes.PUB",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"Lib.Buffer.as_seq",
"Lib.Buffer.MUT",
"Prims.op_Addition",
"FStar.Calc.calc_finish",
"Prims.nat",
"Prims.eq2",
"Lib.IntTypes.U64",
"Lib.IntTypes.SEC",
"Lib.ByteSequence.nat_from_bytes_le",
"Prims.Cons",
"FStar.Preorder.relation",
"Prims.Nil",
"FStar.Calc.calc_step",
"Prims.op_Modulus",
"FStar.Mul.op_Star",
"Prims.pow2",
"Lib.IntTypes.logand_v",
"Lib.IntTypes.op_Amp_Dot",
"Lib.IntTypes.u64",
"FStar.Calc.calc_init",
"FStar.Calc.calc_pack",
"Prims.squash",
"Lib.IntTypes.logand_spec",
"FStar.UInt.logand_mask",
"FStar.UInt.to_uint_t",
"FStar.Pervasives.assert_norm",
"Prims.int",
"Prims.op_Subtraction",
"Lib.ByteSequence.lemma_reveal_uint_to_bytes_le",
"Lib.ByteSequence.nat_from_intseq_le_slice_lemma",
"Lib.IntTypes.U8",
"FStar.Math.Lemmas.cancel_mul_mod",
"FStar.Math.Lemmas.swap_mul",
"FStar.Math.Lemmas.lemma_mod_plus_distr_r",
"Lib.IntTypes.int_t",
"Lib.IntTypes.uint64",
"Lib.ByteBuffer.uint_from_bytes_le",
"Lib.IntTypes.uint_t",
"Lib.Buffer.lbuffer_t",
"FStar.UInt32.uint_to_t",
"FStar.UInt32.t",
"Lib.Buffer.sub",
"FStar.Monotonic.HyperStack.mem",
"FStar.HyperStack.ST.get"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
) | [] | Hacl.Impl.Load56.hload56_le | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | b: Lib.Buffer.lbuffer Lib.IntTypes.uint8 64ul -> off: Lib.IntTypes.size_t{Lib.IntTypes.v off <= 56}
-> FStar.HyperStack.ST.Stack Lib.IntTypes.uint64 | {
"end_col": 4,
"end_line": 59,
"start_col": 22,
"start_line": 28
} |
FStar.Pervasives.Lemma | val lemma_load_32_bytes (k: lbytes 32) (b0 b1 b2 b3 b4: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_load_32_bytes (k:lbytes 32) (b0 b1 b2 b3 b4:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 32);
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 32) `Seq.equal` k);
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000) | val lemma_load_32_bytes (k: lbytes 32) (b0 b1 b2 b3 b4: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k)
let lemma_load_32_bytes (k: lbytes 32) (b0 b1 b2 b3 b4: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k) = | false | null | true | lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 32);
assert ((Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14)) `Seq.equal` (Seq.slice k 0 14));
assert ((Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21)) `Seq.equal` (Seq.slice k 0 21));
assert ((Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28)) `Seq.equal` (Seq.slice k 0 28));
assert ((Seq.append (Seq.slice k 0 28) (Seq.slice k 28 32)) `Seq.equal` k);
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000) | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [
"lemma"
] | [
"Lib.ByteSequence.lbytes",
"Lib.IntTypes.uint64",
"FStar.Pervasives.assert_norm",
"Prims.eq2",
"Prims.int",
"Prims.pow2",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"Lib.IntTypes.uint_t",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"FStar.Seq.Base.append",
"FStar.Seq.Base.slice",
"Hacl.Impl.Load56.lemma_nat_from_bytes_le_append",
"Prims.l_and",
"Prims.l_or",
"Lib.IntTypes.range",
"Lib.IntTypes.U64",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"FStar.Mul.op_Star",
"Lib.Sequence.length",
"Lib.IntTypes.v",
"Lib.ByteSequence.nat_from_bytes_le",
"Prims.squash",
"Prims.nat",
"Hacl.Spec.BignumQ.Definitions.as_nat5",
"FStar.Pervasives.Native.Mktuple5",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) =
let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` Seq.slice k 0 (Seq.length k1));
assert (k2 `Seq.equal` Seq.slice k (Seq.length k1) n)
#push-options "--z3rlimit 100"
let lemma_load_64_bytes (k:lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\
v b9 == v (Seq.index k 63)
)
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 35);
lemma_nat_from_bytes_le_append (Seq.slice k 0 35) (Seq.slice k 35 42);
lemma_nat_from_bytes_le_append (Seq.slice k 0 42) (Seq.slice k 42 49);
lemma_nat_from_bytes_le_append (Seq.slice k 0 49) (Seq.slice k 49 56);
lemma_nat_from_bytes_le_append (Seq.slice k 0 56) (Seq.slice k 56 63);
lemma_nat_from_bytes_le_append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63));
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 35) `Seq.equal` Seq.slice k 0 35);
assert (Seq.append (Seq.slice k 0 35) (Seq.slice k 35 42) `Seq.equal` Seq.slice k 0 42);
assert (Seq.append (Seq.slice k 0 42) (Seq.slice k 42 49) `Seq.equal` Seq.slice k 0 49);
assert (Seq.append (Seq.slice k 0 49) (Seq.slice k 49 56) `Seq.equal` Seq.slice k 0 56);
assert (Seq.append (Seq.slice k 0 56) (Seq.slice k 56 63) `Seq.equal` Seq.slice k 0 63);
assert (Seq.append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63)) `Seq.equal` k);
nat_from_intseq_le_lemma0 (Seq.create 1 (Seq.index k 63));
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 280 == 0x10000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 336 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 392 == 0x100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 448 == 0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 504 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000)
#pop-options
val load_64_bytes:
out:lbuffer uint64 10ul
-> b:lbuffer uint8 64ul ->
Stack unit
(requires fun h -> live h out /\ live h b)
(ensures fun h0 _ h1 -> modifies (loc out) h0 h1 /\
F56.wide_as_nat h1 out == nat_from_bytes_le (as_seq h0 b) /\
F56.qelem_wide_fits h1 out (1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
)
[@CInline]
let load_64_bytes out b =
let h0 = ST.get() in
let b0 = hload56_le b 0ul in
let b1 = hload56_le b 7ul in
let b2 = hload56_le b 14ul in
let b3 = hload56_le b 21ul in
let b4 = hload56_le b 28ul in
let b5 = hload56_le b 35ul in
let b6 = hload56_le b 42ul in
let b7 = hload56_le b 49ul in
let b8 = hload56_le b 56ul in
let b63 = b.(63ul) in
let b9 = to_u64 b63 in
lemma_load_64_bytes (as_seq h0 b) b0 b1 b2 b3 b4 b5 b6 b7 b8 b9;
Hacl.Bignum25519.make_u64_10 out b0 b1 b2 b3 b4 b5 b6 b7 b8 b9
inline_for_extraction noextract
val hload56_le':
b:lbuffer uint8 32ul
-> off:size_t{v off <= 21} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le' b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_load_32_bytes (k:lbytes 32) (b0 b1 b2 b3 b4:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 50,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_load_32_bytes (k: lbytes 32) (b0 b1 b2 b3 b4: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 32))
(ensures S56.as_nat5 (b0, b1, b2, b3, b4) == nat_from_bytes_le k) | [] | Hacl.Impl.Load56.lemma_load_32_bytes | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
k: Lib.ByteSequence.lbytes 32 ->
b0: Lib.IntTypes.uint64 ->
b1: Lib.IntTypes.uint64 ->
b2: Lib.IntTypes.uint64 ->
b3: Lib.IntTypes.uint64 ->
b4: Lib.IntTypes.uint64
-> FStar.Pervasives.Lemma
(requires
Lib.IntTypes.v b0 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 0 7) /\
Lib.IntTypes.v b1 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 7 14) /\
Lib.IntTypes.v b2 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 14 21) /\
Lib.IntTypes.v b3 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 21 28) /\
Lib.IntTypes.v b4 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 28 32))
(ensures
Hacl.Spec.BignumQ.Definitions.as_nat5 (b0, b1, b2, b3, b4) ==
Lib.ByteSequence.nat_from_bytes_le k) | {
"end_col": 87,
"end_line": 212,
"start_col": 2,
"start_line": 201
} |
FStar.Pervasives.Lemma | val lemma_load_64_bytes (k: lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\ v b9 == v (Seq.index k 63))
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k) | [
{
"abbrev": true,
"full_module": "Hacl.Spec.BignumQ.Definitions",
"short_module": "S56"
},
{
"abbrev": true,
"full_module": "Hacl.Impl.BignumQ.Mul",
"short_module": "F56"
},
{
"abbrev": false,
"full_module": "Lib.ByteBuffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.Buffer",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.ByteSequence",
"short_module": null
},
{
"abbrev": false,
"full_module": "Lib.IntTypes",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.HyperStack.All",
"short_module": null
},
{
"abbrev": true,
"full_module": "FStar.HyperStack.ST",
"short_module": "ST"
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Hacl.Impl",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let lemma_load_64_bytes (k:lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\
v b9 == v (Seq.index k 63)
)
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k)
=
lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 35);
lemma_nat_from_bytes_le_append (Seq.slice k 0 35) (Seq.slice k 35 42);
lemma_nat_from_bytes_le_append (Seq.slice k 0 42) (Seq.slice k 42 49);
lemma_nat_from_bytes_le_append (Seq.slice k 0 49) (Seq.slice k 49 56);
lemma_nat_from_bytes_le_append (Seq.slice k 0 56) (Seq.slice k 56 63);
lemma_nat_from_bytes_le_append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63));
assert (Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14) `Seq.equal` Seq.slice k 0 14);
assert (Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21) `Seq.equal` Seq.slice k 0 21);
assert (Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28) `Seq.equal` Seq.slice k 0 28);
assert (Seq.append (Seq.slice k 0 28) (Seq.slice k 28 35) `Seq.equal` Seq.slice k 0 35);
assert (Seq.append (Seq.slice k 0 35) (Seq.slice k 35 42) `Seq.equal` Seq.slice k 0 42);
assert (Seq.append (Seq.slice k 0 42) (Seq.slice k 42 49) `Seq.equal` Seq.slice k 0 49);
assert (Seq.append (Seq.slice k 0 49) (Seq.slice k 49 56) `Seq.equal` Seq.slice k 0 56);
assert (Seq.append (Seq.slice k 0 56) (Seq.slice k 56 63) `Seq.equal` Seq.slice k 0 63);
assert (Seq.append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63)) `Seq.equal` k);
nat_from_intseq_le_lemma0 (Seq.create 1 (Seq.index k 63));
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 280 == 0x10000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 336 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 392 == 0x100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 448 == 0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 504 == 0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000) | val lemma_load_64_bytes (k: lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\ v b9 == v (Seq.index k 63))
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k)
let lemma_load_64_bytes (k: lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\ v b9 == v (Seq.index k 63))
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k) = | false | null | true | lemma_nat_from_bytes_le_append (Seq.slice k 0 7) (Seq.slice k 7 14);
lemma_nat_from_bytes_le_append (Seq.slice k 0 14) (Seq.slice k 14 21);
lemma_nat_from_bytes_le_append (Seq.slice k 0 21) (Seq.slice k 21 28);
lemma_nat_from_bytes_le_append (Seq.slice k 0 28) (Seq.slice k 28 35);
lemma_nat_from_bytes_le_append (Seq.slice k 0 35) (Seq.slice k 35 42);
lemma_nat_from_bytes_le_append (Seq.slice k 0 42) (Seq.slice k 42 49);
lemma_nat_from_bytes_le_append (Seq.slice k 0 49) (Seq.slice k 49 56);
lemma_nat_from_bytes_le_append (Seq.slice k 0 56) (Seq.slice k 56 63);
lemma_nat_from_bytes_le_append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63));
assert ((Seq.append (Seq.slice k 0 7) (Seq.slice k 7 14)) `Seq.equal` (Seq.slice k 0 14));
assert ((Seq.append (Seq.slice k 0 14) (Seq.slice k 14 21)) `Seq.equal` (Seq.slice k 0 21));
assert ((Seq.append (Seq.slice k 0 21) (Seq.slice k 21 28)) `Seq.equal` (Seq.slice k 0 28));
assert ((Seq.append (Seq.slice k 0 28) (Seq.slice k 28 35)) `Seq.equal` (Seq.slice k 0 35));
assert ((Seq.append (Seq.slice k 0 35) (Seq.slice k 35 42)) `Seq.equal` (Seq.slice k 0 42));
assert ((Seq.append (Seq.slice k 0 42) (Seq.slice k 42 49)) `Seq.equal` (Seq.slice k 0 49));
assert ((Seq.append (Seq.slice k 0 49) (Seq.slice k 49 56)) `Seq.equal` (Seq.slice k 0 56));
assert ((Seq.append (Seq.slice k 0 56) (Seq.slice k 56 63)) `Seq.equal` (Seq.slice k 0 63));
assert ((Seq.append (Seq.slice k 0 63) (Seq.create 1 (Seq.index k 63))) `Seq.equal` k);
nat_from_intseq_le_lemma0 (Seq.create 1 (Seq.index k 63));
assert_norm (pow2 56 == 0x100000000000000);
assert_norm (pow2 112 == 0x10000000000000000000000000000);
assert_norm (pow2 168 == 0x1000000000000000000000000000000000000000000);
assert_norm (pow2 224 == 0x100000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 280 == 0x10000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 336 ==
0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000);
assert_norm (pow2 392 ==
0x100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
);
assert_norm (pow2 448 ==
0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
);
assert_norm (pow2 504 ==
0x1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
) | {
"checked_file": "Hacl.Impl.Load56.fst.checked",
"dependencies": [
"prims.fst.checked",
"Lib.IntTypes.fsti.checked",
"Lib.ByteSequence.fsti.checked",
"Lib.ByteBuffer.fsti.checked",
"Lib.Buffer.fsti.checked",
"Hacl.Spec.BignumQ.Definitions.fst.checked",
"Hacl.Impl.BignumQ.Mul.fsti.checked",
"Hacl.Bignum25519.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.UInt.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Math.Lemmas.fst.checked",
"FStar.HyperStack.ST.fsti.checked",
"FStar.HyperStack.All.fst.checked",
"FStar.Calc.fsti.checked"
],
"interface_file": false,
"source_file": "Hacl.Impl.Load56.fst"
} | [
"lemma"
] | [
"Lib.ByteSequence.lbytes",
"Lib.IntTypes.uint64",
"FStar.Pervasives.assert_norm",
"Prims.eq2",
"Prims.int",
"Prims.pow2",
"Prims.unit",
"Lib.ByteSequence.nat_from_intseq_le_lemma0",
"Lib.IntTypes.U8",
"Lib.IntTypes.SEC",
"FStar.Seq.Base.create",
"Lib.IntTypes.uint_t",
"FStar.Seq.Base.index",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.append",
"FStar.Seq.Base.slice",
"Hacl.Impl.Load56.lemma_nat_from_bytes_le_append",
"Prims.l_and",
"Prims.l_or",
"Lib.IntTypes.range",
"Lib.IntTypes.U64",
"Prims.b2t",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"FStar.Mul.op_Star",
"Lib.Sequence.length",
"Lib.IntTypes.v",
"Lib.ByteSequence.nat_from_bytes_le",
"Prims.squash",
"Prims.nat",
"Hacl.Spec.BignumQ.Definitions.wide_as_nat5",
"FStar.Pervasives.Native.Mktuple10",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module Hacl.Impl.Load56
module ST = FStar.HyperStack.ST
open FStar.HyperStack.All
open FStar.Mul
open Lib.IntTypes
open Lib.ByteSequence
open Lib.Buffer
open Lib.ByteBuffer
module F56 = Hacl.Impl.BignumQ.Mul
module S56 = Hacl.Spec.BignumQ.Definitions
#reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0"
inline_for_extraction noextract
val hload56_le:
b:lbuffer uint8 64ul
-> off:size_t{v off <= 56} ->
Stack uint64
(requires fun h -> live h b)
(ensures fun h0 z h1 -> h0 == h1 /\
v z < 0x100000000000000 /\
v z == nat_from_bytes_le (Seq.slice (as_seq h0 b) (v off) (v off + 7))
)
let hload56_le b off =
let h0 = ST.get() in
let b8 = sub b off 8ul in
let z = uint_from_bytes_le b8 in
let z' = z &. u64 0xffffffffffffff in
assert_norm (0xffffffffffffff == pow2 56 - 1);
assert_norm (0x100000000000000 == pow2 56 );
calc (==) {
v z' <: nat;
(==) { }
v (z &. u64 0xffffffffffffff);
(==) { logand_spec z (u64 0xffffffffffffff) }
v z `logand_v` 0xffffffffffffff;
(==) { assert_norm(pow2 56 - 1 == 0xffffffffffffff); UInt.logand_mask (UInt.to_uint_t 64 (v z)) 56 }
(v z % pow2 56);
(==) { lemma_reveal_uint_to_bytes_le #U64 #SEC (as_seq h0 b8) }
nat_from_bytes_le (as_seq h0 b8) % pow2 56;
(==) { nat_from_intseq_le_slice_lemma (as_seq h0 b8) 7 }
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) +
pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) % pow2 56;
(==) { FStar.Math.Lemmas.lemma_mod_plus_distr_r
(nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7))
(pow2 (7 * 8) * nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8))
(pow2 56);
FStar.Math.Lemmas.swap_mul (pow2 (7 * 8)) (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8));
FStar.Math.Lemmas.cancel_mul_mod (nat_from_bytes_le (Seq.slice (as_seq h0 b8) 7 8)) (pow2 56) }
nat_from_bytes_le (Seq.slice (as_seq h0 b8) 0 7) <: nat;
};
assert (Seq.equal
(Seq.slice (as_seq h0 b) (v off) (v off + 7))
(Seq.slice (as_seq h0 b8) 0 7));
z'
let lemma_nat_from_bytes_le_append (k1 k2:bytes) : Lemma
(requires Seq.length k1 + Seq.length k2 <= max_size_t)
(ensures nat_from_bytes_le (Seq.append k1 k2) ==
nat_from_bytes_le k1 + pow2 (Seq.length k1 * 8) * nat_from_bytes_le k2) =
let k = Seq.append k1 k2 in
let n = Seq.length k1 + Seq.length k2 in
nat_from_intseq_le_slice_lemma #U8 #SEC #n k (Seq.length k1);
assert (k1 `Seq.equal` Seq.slice k 0 (Seq.length k1));
assert (k2 `Seq.equal` Seq.slice k (Seq.length k1) n)
#push-options "--z3rlimit 100"
let lemma_load_64_bytes (k:lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9:uint64) : Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\
v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\
v b9 == v (Seq.index k 63)
)
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k) | false | false | Hacl.Impl.Load56.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 0,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val lemma_load_64_bytes (k: lbytes 64) (b0 b1 b2 b3 b4 b5 b6 b7 b8 b9: uint64)
: Lemma
(requires
v b0 == nat_from_bytes_le (Seq.slice k 0 7) /\ v b1 == nat_from_bytes_le (Seq.slice k 7 14) /\
v b2 == nat_from_bytes_le (Seq.slice k 14 21) /\
v b3 == nat_from_bytes_le (Seq.slice k 21 28) /\
v b4 == nat_from_bytes_le (Seq.slice k 28 35) /\
v b5 == nat_from_bytes_le (Seq.slice k 35 42) /\
v b6 == nat_from_bytes_le (Seq.slice k 42 49) /\
v b7 == nat_from_bytes_le (Seq.slice k 49 56) /\
v b8 == nat_from_bytes_le (Seq.slice k 56 63) /\ v b9 == v (Seq.index k 63))
(ensures S56.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) == nat_from_bytes_le k) | [] | Hacl.Impl.Load56.lemma_load_64_bytes | {
"file_name": "code/ed25519/Hacl.Impl.Load56.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
k: Lib.ByteSequence.lbytes 64 ->
b0: Lib.IntTypes.uint64 ->
b1: Lib.IntTypes.uint64 ->
b2: Lib.IntTypes.uint64 ->
b3: Lib.IntTypes.uint64 ->
b4: Lib.IntTypes.uint64 ->
b5: Lib.IntTypes.uint64 ->
b6: Lib.IntTypes.uint64 ->
b7: Lib.IntTypes.uint64 ->
b8: Lib.IntTypes.uint64 ->
b9: Lib.IntTypes.uint64
-> FStar.Pervasives.Lemma
(requires
Lib.IntTypes.v b0 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 0 7) /\
Lib.IntTypes.v b1 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 7 14) /\
Lib.IntTypes.v b2 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 14 21) /\
Lib.IntTypes.v b3 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 21 28) /\
Lib.IntTypes.v b4 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 28 35) /\
Lib.IntTypes.v b5 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 35 42) /\
Lib.IntTypes.v b6 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 42 49) /\
Lib.IntTypes.v b7 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 49 56) /\
Lib.IntTypes.v b8 == Lib.ByteSequence.nat_from_bytes_le (FStar.Seq.Base.slice k 56 63) /\
Lib.IntTypes.v b9 == Lib.IntTypes.v (FStar.Seq.Base.index k 63))
(ensures
Hacl.Spec.BignumQ.Definitions.wide_as_nat5 (b0, b1, b2, b3, b4, b5, b6, b7, b8, b9) ==
Lib.ByteSequence.nat_from_bytes_le k) | {
"end_col": 157,
"end_line": 116,
"start_col": 2,
"start_line": 89
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let szt_fv = R.pack_fv szt_lid | let szt_fv = | false | null | false | R.pack_fv szt_lid | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.szt_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val szt_fv : FStar.Reflection.Types.fv | [] | Pulse.Reflection.Util.szt_fv | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.fv | {
"end_col": 30,
"end_line": 30,
"start_col": 13,
"start_line": 30
} |
|
Prims.Tot | val mk_tuple2 (u1 u2: R.universe) (a1 a2: R.term) : R.term | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit)) | val mk_tuple2 (u1 u2: R.universe) (a1 a2: R.term) : R.term
let mk_tuple2 (u1 u2: R.universe) (a1 a2: R.term) : R.term = | false | null | false | let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.tuple2_lid",
"Prims.Cons",
"Prims.Nil"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"] | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_tuple2 (u1 u2: R.universe) (a1 a2: R.term) : R.term | [] | Pulse.Reflection.Util.mk_tuple2 | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u1: FStar.Reflection.Types.universe ->
u2: FStar.Reflection.Types.universe ->
a1: FStar.Reflection.Types.term ->
a2: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 37,
"end_line": 47,
"start_col": 2,
"start_line": 44
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let true_tm = R.pack_ln (R.Tv_Const (R.C_True)) | let true_tm = | false | null | false | R.pack_ln (R.Tv_Const (R.C_True)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_Const",
"FStar.Reflection.V2.Data.C_True"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val true_tm : FStar.Reflection.Types.term | [] | Pulse.Reflection.Util.true_tm | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.term | {
"end_col": 47,
"end_line": 63,
"start_col": 14,
"start_line": 63
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let inames_lid = mk_pulse_lib_core_lid "inames" | let inames_lid = | false | null | false | mk_pulse_lib_core_lid "inames" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val inames_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.inames_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 47,
"end_line": 67,
"start_col": 17,
"start_line": 67
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit" | let stt_atomic_admit_lid = | false | null | false | mk_pulse_lib_core_lid "stt_atomic_admit" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val stt_atomic_admit_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.stt_atomic_admit_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 67,
"end_line": 113,
"start_col": 27,
"start_line": 113
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let nat_tm = R.pack_ln (R.Tv_FVar nat_fv) | let nat_tm = | false | null | false | R.pack_ln (R.Tv_FVar nat_fv) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_FVar",
"Pulse.Reflection.Util.nat_fv"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"] | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val nat_tm : FStar.Reflection.Types.term | [] | Pulse.Reflection.Util.nat_tm | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.term | {
"end_col": 41,
"end_line": 28,
"start_col": 13,
"start_line": 28
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"] | let snd_lid = | false | null | false | ["FStar"; "Pervasives"; "Native"; "snd"] | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Prims.Cons",
"Prims.string",
"Prims.Nil"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"] | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val snd_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.snd_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 54,
"end_line": 41,
"start_col": 14,
"start_line": 41
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pure_lid = mk_pulse_lib_core_lid "pure" | let pure_lid = | false | null | false | mk_pulse_lib_core_lid "pure" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pure_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.pure_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 43,
"end_line": 77,
"start_col": 15,
"start_line": 77
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let int_lid = R.int_lid | let int_lid = | false | null | false | R.int_lid | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Const.int_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val int_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.int_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 24,
"end_line": 15,
"start_col": 15,
"start_line": 15
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists" | let elim_exists_lid = | false | null | false | mk_pulse_lib_core_lid "elim_exists" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val elim_exists_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.elim_exists_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 57,
"end_line": 209,
"start_col": 22,
"start_line": 209
} |
|
Prims.Tot | val mk_intro_exists (u: R.universe) (a p e: R.term) : R.term | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit)) | val mk_intro_exists (u: R.universe) (a p e: R.term) : R.term
let mk_intro_exists (u: R.universe) (a p e: R.term) : R.term = | false | null | false | let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Q_Implicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.intro_exists_lid",
"Prims.Cons",
"Prims.Nil"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_intro_exists (u: R.universe) (a p e: R.term) : R.term | [] | Pulse.Reflection.Util.mk_intro_exists | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u107: FStar.Reflection.Types.universe ->
a: FStar.Reflection.Types.term ->
p: FStar.Reflection.Types.term ->
e: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 42,
"end_line": 232,
"start_col": 69,
"start_line": 228
} |
Prims.Tot | val mk_stt_comp (u: R.universe) (res pre post: R.term) : Tot R.term | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | val mk_stt_comp (u: R.universe) (res pre post: R.term) : Tot R.term
let mk_stt_comp (u: R.universe) (res pre post: R.term) : Tot R.term = | false | null | false | let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"Pulse.Reflection.Util.stt_fv",
"Prims.Cons",
"Prims.Nil"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_stt_comp (u: R.universe) (res pre post: R.term) : Tot R.term | [] | Pulse.Reflection.Util.mk_stt_comp | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u42: FStar.Reflection.Types.universe ->
res: FStar.Reflection.Types.term ->
pre: FStar.Reflection.Types.term ->
post: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 45,
"end_line": 140,
"start_col": 67,
"start_line": 136
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bool_lid = R.bool_lid | let bool_lid = | false | null | false | R.bool_lid | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Const.bool_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bool_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.bool_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 25,
"end_line": 14,
"start_col": 15,
"start_line": 14
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bool_tm = R.pack_ln (R.Tv_FVar bool_fv) | let bool_tm = | false | null | false | R.pack_ln (R.Tv_FVar bool_fv) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_FVar",
"Pulse.Reflection.Util.bool_fv"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bool_tm : FStar.Reflection.Types.term | [] | Pulse.Reflection.Util.bool_tm | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.term | {
"end_col": 43,
"end_line": 25,
"start_col": 14,
"start_line": 25
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let return_stt_ghost_noeq_lid = mk_pulse_lib_core_lid "return_stt_ghost_noeq" | let return_stt_ghost_noeq_lid = | false | null | false | mk_pulse_lib_core_lid "return_stt_ghost_noeq" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic"
let return_stt_atomic_noeq_lid = mk_pulse_lib_core_lid "return_stt_atomic_noeq" | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val return_stt_ghost_noeq_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.return_stt_ghost_noeq_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 77,
"end_line": 282,
"start_col": 32,
"start_line": 282
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic" | let return_stt_atomic_lid = | false | null | false | mk_pulse_lib_core_lid "return_stt_atomic" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt" | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val return_stt_atomic_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.return_stt_atomic_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 69,
"end_line": 279,
"start_col": 28,
"start_line": 279
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unit_fv = R.pack_fv unit_lid | let unit_fv = | false | null | false | R.pack_fv unit_lid | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.unit_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unit_fv : FStar.Reflection.Types.fv | [] | Pulse.Reflection.Util.unit_fv | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.fv | {
"end_col": 32,
"end_line": 22,
"start_col": 14,
"start_line": 22
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_lift_ghost_atomic (u:R.universe) (a opened pre post e reveal_a:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit)) | let mk_lift_ghost_atomic (u: R.universe) (a opened pre post e reveal_a: R.term) = | false | null | false | let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Q_Implicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"FStar.Reflection.V2.Builtins.pack_fv",
"Prims.Cons",
"Prims.Nil",
"Prims.list",
"Prims.string",
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic"
let return_stt_atomic_noeq_lid = mk_pulse_lib_core_lid "return_stt_atomic_noeq"
let return_stt_ghost_lid = mk_pulse_lib_core_lid "return_stt_ghost"
let return_stt_ghost_noeq_lid = mk_pulse_lib_core_lid "return_stt_ghost_noeq"
let mk_stt_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
// Wrapper.lift_stt_atomic<u> #a #pre #post e
let mk_lift_atomic_stt (u:R.universe) (a pre post e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.lift_stt_ghost<u> #a #opened #pre #post e reveal_a | false | false | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_lift_ghost_atomic : u156: FStar.Reflection.Types.universe ->
a: FStar.Reflection.Types.term ->
opened: FStar.Reflection.Types.term ->
pre: FStar.Reflection.Types.term ->
post: FStar.Reflection.Types.term ->
e: FStar.Reflection.Types.term ->
reveal_a: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | [] | Pulse.Reflection.Util.mk_lift_ghost_atomic | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u156: FStar.Reflection.Types.universe ->
a: FStar.Reflection.Types.term ->
opened: FStar.Reflection.Types.term ->
pre: FStar.Reflection.Types.term ->
post: FStar.Reflection.Types.term ->
e: FStar.Reflection.Types.term ->
reveal_a: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 45,
"end_line": 352,
"start_col": 2,
"start_line": 344
} |
|
Prims.Tot | val mk_stt_atomic_return_noeq (u: R.universe) (ty t post: R.term) : R.term | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_stt_atomic_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | val mk_stt_atomic_return_noeq (u: R.universe) (ty t post: R.term) : R.term
let mk_stt_atomic_return_noeq (u: R.universe) (ty t post: R.term) : R.term = | false | null | false | let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Q_Implicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.return_stt_atomic_noeq_lid",
"Prims.Cons",
"Prims.Nil"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic"
let return_stt_atomic_noeq_lid = mk_pulse_lib_core_lid "return_stt_atomic_noeq"
let return_stt_ghost_lid = mk_pulse_lib_core_lid "return_stt_ghost"
let return_stt_ghost_noeq_lid = mk_pulse_lib_core_lid "return_stt_ghost_noeq"
let mk_stt_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_stt_atomic_return_noeq (u: R.universe) (ty t post: R.term) : R.term | [] | Pulse.Reflection.Util.mk_stt_atomic_return_noeq | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u136: FStar.Reflection.Types.universe ->
ty: FStar.Reflection.Types.term ->
t: FStar.Reflection.Types.term ->
post: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 45,
"end_line": 314,
"start_col": 12,
"start_line": 309
} |
Prims.Tot | val mk_seq_create (u: R.universe) (a len v: R.term) : R.term | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_seq_create (u:R.universe) (a:R.term) (len:R.term) (v:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (R.pack_fv seq_create_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (len, Q_Explicit)) in
pack_ln (Tv_App t (v, Q_Explicit)) | val mk_seq_create (u: R.universe) (a len v: R.term) : R.term
let mk_seq_create (u: R.universe) (a len v: R.term) : R.term = | false | null | false | let open R in
let t = pack_ln (Tv_UInst (R.pack_fv seq_create_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (len, Q_Explicit)) in
pack_ln (Tv_App t (v, Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Q_Implicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.seq_create_lid",
"Prims.Cons",
"Prims.Nil"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic"
let return_stt_atomic_noeq_lid = mk_pulse_lib_core_lid "return_stt_atomic_noeq"
let return_stt_ghost_lid = mk_pulse_lib_core_lid "return_stt_ghost"
let return_stt_ghost_noeq_lid = mk_pulse_lib_core_lid "return_stt_ghost_noeq"
let mk_stt_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
// Wrapper.lift_stt_atomic<u> #a #pre #post e
let mk_lift_atomic_stt (u:R.universe) (a pre post e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.lift_stt_ghost<u> #a #opened #pre #post e reveal_a
let mk_lift_ghost_atomic (u:R.universe) (a opened pre post e reveal_a:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit))
// Wrapper.bind_stt<u1, u2> #a #b #pre1 #post1 #post2 e1 e2
let mk_bind_stt
(u1 u2:R.universe)
(ty1 ty2:R.term)
(pre1 post1: R.term)
(post2: R.term)
(t1 t2:R.term)
: R.term
= let bind_lid = mk_pulse_lib_core_lid "bind_stt" in
let head = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app head
[(ty1, R.Q_Implicit)])
[(ty2, R.Q_Implicit)])
[(pre1, R.Q_Implicit)])
[(post1, R.Q_Implicit)])
[(post2, R.Q_Implicit)])
[(t1, R.Q_Explicit)])
[(t2, R.Q_Explicit)]
// Wrapper.bind_sttg<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2
let mk_bind_ghost
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_sttg" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
pack_ln (R.Tv_App t (e2, Q_Explicit))
// Wrapper.bind_stt_ghost_atomic<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2 reveal_a
let mk_bind_ghost_atomic
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2 reveal_a:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_stt_ghost_atomic" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (e2, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit))
// Wrapper.bind_stt_atomic_ghost<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2 reveal_b
let mk_bind_atomic_ghost
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2 reveal_b:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_stt_atomic_ghost" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (e2, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_b, Q_Explicit))
// Wrapper.frame_stt<u> #ty #pre #post frame t
let mk_frame_stt
(u:R.universe)
(ty:R.term)
(pre: R.term)
(post: R.term)
(frame: R.term)
(t:R.term)
: R.term
= let frame_lid = mk_pulse_lib_core_lid "frame_stt" in
let frame_fv = R.pack_fv frame_lid in
let frame_univ_inst u = R.pack_ln (R.Tv_UInst (R.pack_fv frame_lid) [u]) in
let head = frame_univ_inst u in
R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app head [(ty, R.Q_Implicit)])
[(pre, R.Q_Implicit)])
[(post, R.Q_Implicit)])
[(frame, R.Q_Explicit)])
[(t, R.Q_Explicit)]
// Wrapper.frame_stt_atomic<u> #a #opened #pre #post frame e
let mk_frame_stt_atomic (u:R.universe) (a opened pre post frame e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "frame_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (frame, Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.frame_stt_ghost<u> #a #opened #pre #post frame e
let mk_frame_stt_ghost (u:R.universe) (a opened pre post frame e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "frame_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (frame, Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.sub_stt<u> #ty #pre1 pre2 #post1 post2 () () e
let mk_sub_stt
(u:R.universe)
(ty:R.term)
(pre1 pre2: R.term)
(post1 post2: R.term)
(t:R.term)
: R.term
= let subsumption_lid = mk_pulse_lib_core_lid "sub_stt" in
let subsumption_fv = R.pack_fv subsumption_lid in
let subsumption_univ_inst u = R.pack_ln (R.Tv_UInst subsumption_fv [u]) in
let head = subsumption_univ_inst u in
R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app head [(ty, R.Q_Implicit)])
[(pre1, R.Q_Implicit)])
[(pre2, R.Q_Explicit)])
[(post1, R.Q_Implicit)])
[(post2, R.Q_Explicit)])
[(`(), R.Q_Explicit)])
[(`(), R.Q_Explicit)])
[(t, R.Q_Explicit)]
// Wrapper.sub_stt_atomic<u> #a #opened #pre1 pre2 #post1 post2 () () e
let mk_sub_stt_atomic (u:R.universe) (a opened pre1 pre2 post1 post2 e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "sub_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.sub_stt_ghost<u> #a #opened #pre1 pre2 #post1 post2 () () e
(* Reflected application of [sub_stt_ghost]: subsumption for ghost
   computations.  Mirrors [mk_sub_stt_atomic] exactly, differing only
   in the head combinator ("sub_stt_ghost" instead of "sub_stt_atomic"). *)
let mk_sub_stt_ghost (u:R.universe) (a opened pre1 pre2 post1 post2 e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "sub_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
(* Reflected application of [stt_par], the parallel-composition
   combinator:
     stt_par<u> #aL #aR #preL #postL #preR #postR eL eR
   [aL]/[aR] are the result types of the left/right branches,
   [preL]/[postL]/[preR]/[postR] their pre/postconditions (all implicit),
   and [eL]/[eR] the two computations (explicit). *)
let mk_par (u:R.universe) (aL aR preL postL preR postR eL eR:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "stt_par" in
let t = pack_ln (Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (Tv_App t (aL, Q_Implicit)) in
let t = pack_ln (Tv_App t (aR, Q_Implicit)) in
let t = pack_ln (Tv_App t (preL, Q_Implicit)) in
let t = pack_ln (Tv_App t (postL, Q_Implicit)) in
let t = pack_ln (Tv_App t (preR, Q_Implicit)) in
let t = pack_ln (Tv_App t (postR, Q_Implicit)) in
let t = pack_ln (Tv_App t (eL, Q_Explicit)) in
pack_ln (Tv_App t (eR, Q_Explicit))
(* Reflected application [rewrite p q ()], rewriting vprop [p] into [q].
   The trailing [`()] fills the last explicit argument of [rewrite] —
   presumably the (squashed) equivalence evidence; confirm against
   Pulse.Lib.Core.rewrite. *)
let mk_rewrite (p q:R.term) =
let open R in
let t = pack_ln (Tv_FVar (pack_fv (mk_pulse_lib_core_lid "rewrite"))) in
let t = pack_ln (Tv_App t (p, Q_Explicit)) in
let t = pack_ln (Tv_App t (q, Q_Explicit)) in
pack_ln (Tv_App t (`(), Q_Explicit))
(* Reflected application of [with_local], the stack-allocation
   combinator from Pulse.Lib.Reference:
     with_local<ret_u> #a init #pre #ret_t #post body
   [a]/[init] are the local's type and initial value, [ret_t]/[post] the
   continuation's result type and postcondition, and [body] the
   continuation receiving the fresh reference.  Note [ret_u] is the
   universe of the *return* type, not of [a]. *)
let mk_withlocal (ret_u:R.universe) (a init pre ret_t post body:R.term) =
let open R in
let lid = mk_pulse_lib_reference_lid "with_local" in
let t = pack_ln (Tv_UInst (R.pack_fv lid) [ret_u]) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (init, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (Tv_App t (ret_t, Q_Implicit)) in
let t = pack_ln (Tv_App t (post, Q_Implicit)) in
pack_ln (Tv_App t (body, Q_Explicit))
///// Utils to derive equiv for common constructs /////
(* Congruence lemmas lifting term equivalences through common Pulse
   computation-type constructors.  NOTE(review): all four bodies are
   [admit ()] — these are assumed, not proved; any soundness argument
   for the surrounding elaborator rests on their truth. *)

(* star is congruent: t1 ~ t3 and t2 ~ t4 imply t1 ** t2 ~ t3 ** t4. *)
let mk_star_equiv (g:R.env) (t1 t2 t3 t4:R.term)
(eq1:RT.equiv g t1 t3)
(eq2:RT.equiv g t2 t4)
: RT.equiv g (mk_star t1 t2) (mk_star t3 t4) =
admit ()

(* stt is congruent in result type, pre, and post. *)
let mk_stt_comp_equiv (g:R.env) (u:R.universe) (res1 pre1 post1 res2 pre2 post2:R.term)
(res_eq: RT.equiv g res1 res2)
(pre_eq:RT.equiv g pre1 pre2)
(post_eq:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_comp u res1 pre1 post1)
(mk_stt_comp u res2 pre2 post2)
= admit ()

(* stt_atomic is congruent in pre and post (res/inames fixed). *)
let mk_stt_atomic_comp_equiv (g:R.env) (u:R.universe) (res inames pre1 post1 pre2 post2:R.term)
(pre_eq:RT.equiv g pre1 pre2)
(post_eq:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_atomic_comp u res inames pre1 post1)
(mk_stt_atomic_comp u res inames pre2 post2)
= admit ()

(* stt_ghost is congruent in pre and post (res/inames fixed). *)
let mk_stt_ghost_comp_equiv (g:R.env) (u:R.universe) (res inames pre1 post1 pre2 post2:R.term)
(pre_eq:RT.equiv g pre1 pre2)
(post_eq:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u res inames pre1 post1)
(mk_stt_ghost_comp u res inames pre2 post2)
= admit ()
(* Lids and reflected-term builders for Pulse.Lib.Reference's [ref] and
   [pts_to], plus Steel's [full_perm] constant. *)
let ref_lid = mk_pulse_lib_reference_lid "ref"
let pts_to_lid = mk_pulse_lib_reference_lid "pts_to"
let full_perm_lid = ["Steel"; "FractionalPermission"; "full_perm"]

// [mk_ref a] builds the reflected type [ref a].
let mk_ref (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv ref_lid)) in
pack_ln (Tv_App t (a, Q_Explicit))

(* [mk_pts_to a r perm v] builds [pts_to #a r #perm v]; the
   implicit/explicit qualifiers mirror pts_to's signature. *)
let mk_pts_to (a:R.term) (r:R.term) (perm:R.term) (v:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pts_to_lid)) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (r, Q_Explicit)) in
let t = pack_ln (Tv_App t (perm, Q_Implicit)) in
pack_ln (Tv_App t (v, Q_Explicit))

// Reflected [full_perm] constant (full fractional permission).
let full_perm_tm : R.term =
let open R in
pack_ln (Tv_FVar (pack_fv full_perm_lid))
(* Lids and reflected-term builders for Pulse.Lib.Array.Core:
   [array], its [pts_to], [length], and [is_full_array]. *)
let pulse_lib_array_core = ["Pulse"; "Lib"; "Array"; "Core"]
let mk_pulse_lib_array_core_lid s = pulse_lib_array_core @ [s]

let array_lid = mk_pulse_lib_array_core_lid "array"
let array_pts_to_lid = mk_pulse_lib_array_core_lid "pts_to"
let array_length_lid = mk_pulse_lib_array_core_lid "length"
let array_is_full_lid = mk_pulse_lib_array_core_lid "is_full_array"

// [mk_array a] builds the reflected type [array a].
let mk_array (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv array_lid)) in
pack_ln (Tv_App t (a, Q_Explicit))

// [mk_array_length a arr] builds [length #a arr].
let mk_array_length (a:R.term) (arr:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv array_length_lid)) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
pack_ln (Tv_App t (arr, Q_Explicit))

(* [mk_array_pts_to a arr perm v] builds [pts_to #a arr #perm v];
   qualifiers mirror the array pts_to signature. *)
let mk_array_pts_to (a:R.term) (arr:R.term) (perm:R.term) (v:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv array_pts_to_lid)) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (arr, Q_Explicit)) in
let t = pack_ln (Tv_App t (perm, Q_Implicit)) in
pack_ln (Tv_App t (v, Q_Explicit))

// [mk_array_is_full a arr] builds the proposition [is_full_array #a arr].
let mk_array_is_full (a:R.term) (arr:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv array_is_full_lid)) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
pack_ln (Tv_App t (arr, Q_Explicit))

// [mk_seq u a] builds the reflected type [FStar.Seq.Base.seq a] at universe [u].
let mk_seq (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (R.pack_fv seq_lid) [u]) in
pack_ln (Tv_App t (a, Q_Explicit))
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_seq_create (u: R.universe) (a len v: R.term) : R.term | [] | Pulse.Reflection.Util.mk_seq_create | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u317: FStar.Reflection.Types.universe ->
a: FStar.Reflection.Types.term ->
len: FStar.Reflection.Types.term ->
v: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 36,
"end_line": 659,
"start_col": 2,
"start_line": 655
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let return_stt_lid = mk_pulse_lib_core_lid "return_stt" | let return_stt_lid = | false | null | false | mk_pulse_lib_core_lid "return_stt" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val return_stt_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.return_stt_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 55,
"end_line": 277,
"start_col": 21,
"start_line": 277
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let array_is_full_lid = mk_pulse_lib_array_core_lid "is_full_array" | let array_is_full_lid = | false | null | false | mk_pulse_lib_array_core_lid "is_full_array" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_array_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic"
let return_stt_atomic_noeq_lid = mk_pulse_lib_core_lid "return_stt_atomic_noeq"
let return_stt_ghost_lid = mk_pulse_lib_core_lid "return_stt_ghost"
let return_stt_ghost_noeq_lid = mk_pulse_lib_core_lid "return_stt_ghost_noeq"
let mk_stt_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_atomic_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
: R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_noeq_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (ty, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (t, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
// Wrapper.lift_stt_atomic<u> #a #pre #post e
let mk_lift_atomic_stt (u:R.universe) (a pre post e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.lift_stt_ghost<u> #a #opened #pre #post e reveal_a
let mk_lift_ghost_atomic (u:R.universe) (a opened pre post e reveal_a:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit))
// Wrapper.bind_stt<u1, u2> #a #b #pre1 #post1 #post2 e1 e2
let mk_bind_stt
(u1 u2:R.universe)
(ty1 ty2:R.term)
(pre1 post1: R.term)
(post2: R.term)
(t1 t2:R.term)
: R.term
= let bind_lid = mk_pulse_lib_core_lid "bind_stt" in
let head = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app head
[(ty1, R.Q_Implicit)])
[(ty2, R.Q_Implicit)])
[(pre1, R.Q_Implicit)])
[(post1, R.Q_Implicit)])
[(post2, R.Q_Implicit)])
[(t1, R.Q_Explicit)])
[(t2, R.Q_Explicit)]
// Wrapper.bind_sttg<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2
let mk_bind_ghost
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_sttg" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
pack_ln (R.Tv_App t (e2, Q_Explicit))
// Wrapper.bind_stt_ghost_atomic<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2 reveal_a
let mk_bind_ghost_atomic
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2 reveal_a:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_stt_ghost_atomic" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (e2, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit))
// Wrapper.bind_stt_atomic_ghost<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2 reveal_b
let mk_bind_atomic_ghost
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2 reveal_b:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_stt_atomic_ghost" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (e2, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_b, Q_Explicit))
// Wrapper.frame_stt<u> #ty #pre #post frame t
let mk_frame_stt
(u:R.universe)
(ty:R.term)
(pre: R.term)
(post: R.term)
(frame: R.term)
(t:R.term)
: R.term
= let frame_lid = mk_pulse_lib_core_lid "frame_stt" in
let frame_fv = R.pack_fv frame_lid in
let frame_univ_inst u = R.pack_ln (R.Tv_UInst (R.pack_fv frame_lid) [u]) in
let head = frame_univ_inst u in
R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app head [(ty, R.Q_Implicit)])
[(pre, R.Q_Implicit)])
[(post, R.Q_Implicit)])
[(frame, R.Q_Explicit)])
[(t, R.Q_Explicit)]
// Wrapper.frame_stt_atomic<u> #a #opened #pre #post frame e
let mk_frame_stt_atomic (u:R.universe) (a opened pre post frame e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "frame_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (frame, Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.frame_stt_ghost<u> #a #opened #pre #post frame e
let mk_frame_stt_ghost (u:R.universe) (a opened pre post frame e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "frame_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (frame, Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.sub_stt<u> #ty #pre1 pre2 #post1 post2 () () e
let mk_sub_stt
(u:R.universe)
(ty:R.term)
(pre1 pre2: R.term)
(post1 post2: R.term)
(t:R.term)
: R.term
= let subsumption_lid = mk_pulse_lib_core_lid "sub_stt" in
let subsumption_fv = R.pack_fv subsumption_lid in
let subsumption_univ_inst u = R.pack_ln (R.Tv_UInst subsumption_fv [u]) in
let head = subsumption_univ_inst u in
R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app
(R.mk_app head [(ty, R.Q_Implicit)])
[(pre1, R.Q_Implicit)])
[(pre2, R.Q_Explicit)])
[(post1, R.Q_Implicit)])
[(post2, R.Q_Explicit)])
[(`(), R.Q_Explicit)])
[(`(), R.Q_Explicit)])
[(t, R.Q_Explicit)]
// Wrapper.sub_stt_atomic<u> #a #opened #pre1 pre2 #post1 post2 () () e
let mk_sub_stt_atomic (u:R.universe) (a opened pre1 pre2 post1 post2 e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "sub_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.sub_stt_ghost<u> #a #opened #pre1 pre2 #post1 post2 () () e
let mk_sub_stt_ghost (u:R.universe) (a opened pre1 pre2 post1 post2 e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "sub_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
let t = pack_ln (R.Tv_App t (`(), Q_Explicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
let mk_par (u:R.universe) (aL aR preL postL preR postR eL eR:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "stt_par" in
let t = pack_ln (Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (Tv_App t (aL, Q_Implicit)) in
let t = pack_ln (Tv_App t (aR, Q_Implicit)) in
let t = pack_ln (Tv_App t (preL, Q_Implicit)) in
let t = pack_ln (Tv_App t (postL, Q_Implicit)) in
let t = pack_ln (Tv_App t (preR, Q_Implicit)) in
let t = pack_ln (Tv_App t (postR, Q_Implicit)) in
let t = pack_ln (Tv_App t (eL, Q_Explicit)) in
pack_ln (Tv_App t (eR, Q_Explicit))
let mk_rewrite (p q:R.term) =
let open R in
let t = pack_ln (Tv_FVar (pack_fv (mk_pulse_lib_core_lid "rewrite"))) in
let t = pack_ln (Tv_App t (p, Q_Explicit)) in
let t = pack_ln (Tv_App t (q, Q_Explicit)) in
pack_ln (Tv_App t (`(), Q_Explicit))
let mk_withlocal (ret_u:R.universe) (a init pre ret_t post body:R.term) =
let open R in
let lid = mk_pulse_lib_reference_lid "with_local" in
let t = pack_ln (Tv_UInst (R.pack_fv lid) [ret_u]) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (init, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (Tv_App t (ret_t, Q_Implicit)) in
let t = pack_ln (Tv_App t (post, Q_Implicit)) in
pack_ln (Tv_App t (body, Q_Explicit))
///// Utils to derive equiv for common constructs /////
let mk_star_equiv (g:R.env) (t1 t2 t3 t4:R.term)
(eq1:RT.equiv g t1 t3)
(eq2:RT.equiv g t2 t4)
: RT.equiv g (mk_star t1 t2) (mk_star t3 t4) =
admit ()
let mk_stt_comp_equiv (g:R.env) (u:R.universe) (res1 pre1 post1 res2 pre2 post2:R.term)
(res_eq: RT.equiv g res1 res2)
(pre_eq:RT.equiv g pre1 pre2)
(post_eq:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_comp u res1 pre1 post1)
(mk_stt_comp u res2 pre2 post2)
= admit ()
let mk_stt_atomic_comp_equiv (g:R.env) (u:R.universe) (res inames pre1 post1 pre2 post2:R.term)
(pre_eq:RT.equiv g pre1 pre2)
(post_eq:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_atomic_comp u res inames pre1 post1)
(mk_stt_atomic_comp u res inames pre2 post2)
= admit ()
let mk_stt_ghost_comp_equiv (g:R.env) (u:R.universe) (res inames pre1 post1 pre2 post2:R.term)
(pre_eq:RT.equiv g pre1 pre2)
(post_eq:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u res inames pre1 post1)
(mk_stt_ghost_comp u res inames pre2 post2)
= admit ()
let ref_lid = mk_pulse_lib_reference_lid "ref"
let pts_to_lid = mk_pulse_lib_reference_lid "pts_to"
let full_perm_lid = ["Steel"; "FractionalPermission"; "full_perm"]
let mk_ref (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv ref_lid)) in
pack_ln (Tv_App t (a, Q_Explicit))
let mk_pts_to (a:R.term) (r:R.term) (perm:R.term) (v:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pts_to_lid)) in
let t = pack_ln (Tv_App t (a, Q_Implicit)) in
let t = pack_ln (Tv_App t (r, Q_Explicit)) in
let t = pack_ln (Tv_App t (perm, Q_Implicit)) in
pack_ln (Tv_App t (v, Q_Explicit))
let full_perm_tm : R.term =
let open R in
pack_ln (Tv_FVar (pack_fv full_perm_lid))
let pulse_lib_array_core = ["Pulse"; "Lib"; "Array"; "Core"]
let mk_pulse_lib_array_core_lid s = pulse_lib_array_core @ [s]
let array_lid = mk_pulse_lib_array_core_lid "array"
let array_pts_to_lid = mk_pulse_lib_array_core_lid "pts_to" | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val array_is_full_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.array_is_full_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 67,
"end_line": 622,
"start_col": 24,
"start_line": 622
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let return_stt_ghost_lid = mk_pulse_lib_core_lid "return_stt_ghost" | let return_stt_ghost_lid = | false | null | false | mk_pulse_lib_core_lid "return_stt_ghost" | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
let arrow_dom = (R.term & R.aqualv)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
let mk_erased (u:R.universe) (t:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
R.pack_ln (R.Tv_App hd (t, R.Q_Explicit))
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
let hd = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
let hd = R.pack_ln (R.Tv_App hd (t, R.Q_Implicit)) in
R.pack_ln (R.Tv_App hd (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
let mk_exists (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_forall (u:R.universe) (a p:R.term) =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv forall_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv elim_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
R.pack_ln (R.Tv_App t (p, R.Q_Explicit))
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
let t = R.pack_ln (R.Tv_UInst (R.pack_fv intro_exists_erased_lid) [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Implicit)) in
let t = R.pack_ln (R.Tv_App t (p, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (e, R.Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
let mk_while (inv cond body:R.term) : R.term =
let t = R.pack_ln (R.Tv_FVar (R.pack_fv while_lid)) in
let t = R.pack_ln (R.Tv_App t (inv, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (cond, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (body, R.Q_Explicit))
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic" | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val return_stt_ghost_lid : Prims.list Prims.string | [] | Pulse.Reflection.Util.return_stt_ghost_lid | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Prims.list Prims.string | {
"end_col": 67,
"end_line": 281,
"start_col": 27,
"start_line": 281
} |
|
Prims.Tot | val mk_pure (p: R.term) : R.term | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit)) | val mk_pure (p: R.term) : R.term
let mk_pure (p: R.term) : R.term = | false | null | false | let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Tv_FVar",
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.pure_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_pure (p: R.term) : R.term | [] | Pulse.Reflection.Util.mk_pure | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | p: FStar.Reflection.Types.term -> FStar.Reflection.Types.term | {
"end_col": 36,
"end_line": 86,
"start_col": 2,
"start_line": 84
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let stt_atomic_fv = R.pack_fv stt_atomic_lid | let stt_atomic_fv = | false | null | false | R.pack_fv stt_atomic_lid | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.stt_atomic_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val stt_atomic_fv : FStar.Reflection.Types.fv | [] | Pulse.Reflection.Util.stt_atomic_fv | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.fv | {
"end_col": 44,
"end_line": 143,
"start_col": 20,
"start_line": 143
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let arrow_dom = (R.term & R.aqualv) | let arrow_dom = | false | null | false | (R.term & R.aqualv) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Pervasives.Native.tuple2",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Data.aqualv"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i))) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val arrow_dom : Type0 | [] | Pulse.Reflection.Util.arrow_dom | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | Type0 | {
"end_col": 35,
"end_line": 183,
"start_col": 16,
"start_line": 183
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let stt_ghost_fv = R.pack_fv stt_ghost_lid | let stt_ghost_fv = | false | null | false | R.pack_fv stt_ghost_lid | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.V2.Builtins.pack_fv",
"Pulse.Reflection.Util.stt_ghost_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
let uzero = R.pack_universe (R.Uv_Zero)
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
let t = pack_ln (Tv_App t (ty, Q_Implicit)) in
let t = pack_ln (Tv_App t (e1, Q_Explicit)) in
pack_ln (Tv_App t (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
let t = pack_ln (Tv_App t (t, Q_Explicit)) in
let t = pack_ln (Tv_App t (pre, Q_Explicit)) in
pack_ln (Tv_App t (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit)) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val stt_ghost_fv : FStar.Reflection.Types.fv | [] | Pulse.Reflection.Util.stt_ghost_fv | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} | FStar.Reflection.Types.fv | {
"end_col": 42,
"end_line": 153,
"start_col": 19,
"start_line": 153
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.List.Tot",
"short_module": null
},
{
"abbrev": true,
"full_module": "Pulse.RuntimeUtils",
"short_module": "RU"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.Typing",
"short_module": "RT"
},
{
"abbrev": true,
"full_module": "FStar.Tactics.V2",
"short_module": "T"
},
{
"abbrev": true,
"full_module": "FStar.Reflection.V2",
"short_module": "R"
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "Pulse.Reflection",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_bind_ghost
(u1 u2:R.universe)
(a b opened pre1 post1 post2 e1 e2:R.term) =
let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_sttg" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
pack_ln (R.Tv_App t (e2, Q_Explicit)) | let mk_bind_ghost (u1 u2: R.universe) (a b opened pre1 post1 post2 e1 e2: R.term) = | false | null | false | let open R in
let bind_lid = mk_pulse_lib_core_lid "bind_sttg" in
let t = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1; u2]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (b, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post1, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post2, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e1, Q_Explicit)) in
pack_ln (R.Tv_App t (e2, Q_Explicit)) | {
"checked_file": "Pulse.Reflection.Util.fst.checked",
"dependencies": [
"Pulse.RuntimeUtils.fsti.checked",
"prims.fst.checked",
"FStar.Tactics.V2.fst.checked",
"FStar.Reflection.V2.fst.checked",
"FStar.Reflection.Typing.fsti.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.List.Tot.fst.checked",
"FStar.Ghost.fsti.checked"
],
"interface_file": false,
"source_file": "Pulse.Reflection.Util.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.universe",
"FStar.Reflection.Types.term",
"FStar.Reflection.V2.Builtins.pack_ln",
"FStar.Reflection.V2.Data.Tv_App",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Reflection.V2.Data.aqualv",
"FStar.Reflection.V2.Data.Q_Explicit",
"FStar.Reflection.V2.Data.Q_Implicit",
"FStar.Reflection.V2.Data.Tv_UInst",
"FStar.Reflection.V2.Builtins.pack_fv",
"Prims.Cons",
"Prims.Nil",
"Prims.list",
"Prims.string",
"Pulse.Reflection.Util.mk_pulse_lib_core_lid"
] | [] | module Pulse.Reflection.Util
module R = FStar.Reflection.V2
module T = FStar.Tactics.V2
module RT = FStar.Reflection.Typing
module RU = Pulse.RuntimeUtils
open FStar.List.Tot
let pulse_lib_core = ["Pulse"; "Lib"; "Core"]
let mk_pulse_lib_core_lid s = pulse_lib_core@[s]
let tun = R.pack_ln R.Tv_Unknown
let unit_lid = R.unit_lid
let bool_lid = R.bool_lid
let int_lid = R.int_lid
let erased_lid = ["FStar"; "Ghost"; "erased"]
let hide_lid = ["FStar"; "Ghost"; "hide"]
let reveal_lid = ["FStar"; "Ghost"; "reveal"]
let vprop_lid = mk_pulse_lib_core_lid "vprop"
let vprop_fv = R.pack_fv vprop_lid
let vprop_tm = R.pack_ln (R.Tv_FVar vprop_fv)
let unit_fv = R.pack_fv unit_lid
let unit_tm = R.pack_ln (R.Tv_FVar unit_fv)
let bool_fv = R.pack_fv bool_lid
let bool_tm = R.pack_ln (R.Tv_FVar bool_fv)
let nat_lid = ["Prims"; "nat"]
let nat_fv = R.pack_fv nat_lid
let nat_tm = R.pack_ln (R.Tv_FVar nat_fv)
let szt_lid = ["FStar"; "SizeT"; "t"]
let szt_fv = R.pack_fv szt_lid
let szt_tm = R.pack_ln (R.Tv_FVar szt_fv)
let szv_lid = ["FStar"; "SizeT"; "v"]
let szv_fv = R.pack_fv szv_lid
let szv_tm = R.pack_ln (R.Tv_FVar szv_fv)
let seq_lid = ["FStar"; "Seq"; "Base"; "seq"]
let seq_create_lid = ["FStar"; "Seq"; "Base"; "create"]
let tuple2_lid = ["FStar"; "Pervasives"; "Native"; "tuple2"]
let fst_lid = ["FStar"; "Pervasives"; "Native"; "fst"]
let snd_lid = ["FStar"; "Pervasives"; "Native"; "snd"]
let mk_tuple2 (u1 u2:R.universe) (a1 a2:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv tuple2_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Explicit)) in
pack_ln (Tv_App t (a2, Q_Explicit))
let mk_fst (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv fst_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let mk_snd (u1 u2:R.universe) (a1 a2 e:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv snd_lid) [u1; u2]) in
let t = pack_ln (Tv_App t (a1, Q_Implicit)) in
let t = pack_ln (Tv_App t (a2, Q_Implicit)) in
pack_ln (Tv_App t (e, Q_Explicit))
let true_tm = R.pack_ln (R.Tv_Const (R.C_True))
let false_tm = R.pack_ln (R.Tv_Const (R.C_False))
let emp_lid = mk_pulse_lib_core_lid "emp"
let inames_lid = mk_pulse_lib_core_lid "inames"
let star_lid = mk_pulse_lib_core_lid "op_Star_Star"
let mk_star (l r:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv star_lid)) in
let t = pack_ln (Tv_App t (l, Q_Explicit)) in
pack_ln (Tv_App t (r, Q_Explicit))
let pure_lid = mk_pulse_lib_core_lid "pure"
let exists_lid = mk_pulse_lib_core_lid "exists_"
let forall_lid = mk_pulse_lib_core_lid "forall_"
let args_of (tms:list R.term) =
List.Tot.map (fun x -> x, R.Q_Explicit) tms
(* Builds [pure p], embedding a prop [p] into vprop. *)
let mk_pure (p:R.term) : R.term =
let open R in
let t = pack_ln (Tv_FVar (pack_fv pure_lid)) in
pack_ln (Tv_App t (p, Q_Explicit))
// Universe zero
let uzero = R.pack_universe (R.Uv_Zero)
// Pulse.Lib.Reference module path and lid builder
let pulse_lib_reference = ["Pulse"; "Lib"; "Reference"]
let mk_pulse_lib_reference_lid s = pulse_lib_reference@[s]
(* Builds [squash u#u ty]. *)
let mk_squash (u:R.universe) (ty:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv R.squash_qn) [u]) in
pack_ln (Tv_App t (ty, Q_Explicit))
(* Builds the propositional equality [e1 == e2] at type [ty] in universe [u]. *)
let mk_eq2 (u:R.universe) (ty e1 e2:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv R.eq2_qn) [u]) in
  let app1 = pack_ln (Tv_App head (ty, Q_Implicit)) in
  let app2 = pack_ln (Tv_App app1 (e1, Q_Explicit)) in
  pack_ln (Tv_App app2 (e2, Q_Explicit))
let stt_admit_lid = mk_pulse_lib_core_lid "stt_admit"
(* Builds [stt_admit u#u t pre post].
   Fix: the original rebound the accumulator as [t], shadowing the type
   argument [t]; since lets are non-recursive, [Tv_App t (t, Q_Explicit)]
   applied the head to itself and dropped the type.  A distinct accumulator
   name restores the intended application. *)
let mk_stt_admit (u:R.universe) (t pre post:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv stt_admit_lid) [u]) in
  let head = pack_ln (Tv_App head (t, Q_Explicit)) in
  let head = pack_ln (Tv_App head (pre, Q_Explicit)) in
  pack_ln (Tv_App head (post, Q_Explicit))
let stt_atomic_admit_lid = mk_pulse_lib_core_lid "stt_atomic_admit"
(* Builds [stt_atomic_admit u#u t pre post].
   Fix: as with [mk_stt_admit], the original shadowed the type argument [t]
   with the accumulator and applied the head to itself; renaming the
   accumulator applies the actual type argument. *)
let mk_stt_atomic_admit (u:R.universe) (t pre post:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv stt_atomic_admit_lid) [u]) in
  let head = pack_ln (Tv_App head (t, Q_Explicit)) in
  let head = pack_ln (Tv_App head (pre, Q_Explicit)) in
  pack_ln (Tv_App head (post, Q_Explicit))
let stt_ghost_admit_lid = mk_pulse_lib_core_lid "stt_ghost_admit"
(* Builds [stt_ghost_admit u#u t pre post].
   Fix: as with [mk_stt_admit], the original shadowed the type argument [t]
   with the accumulator and applied the head to itself; renaming the
   accumulator applies the actual type argument. *)
let mk_stt_ghost_admit (u:R.universe) (t pre post:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv stt_ghost_admit_lid) [u]) in
  let head = pack_ln (Tv_App head (t, Q_Explicit)) in
  let head = pack_ln (Tv_App head (pre, Q_Explicit)) in
  pack_ln (Tv_App head (post, Q_Explicit))
let emp_inames_lid = mk_pulse_lib_core_lid "emp_inames"
let elim_pure_lid = mk_pulse_lib_core_lid "elim_pure"
//the thunked, value-type counterpart of the effect STT
let stt_lid = mk_pulse_lib_core_lid "stt"
let stt_fv = R.pack_fv stt_lid
let stt_tm = R.pack_ln (R.Tv_FVar stt_fv)
(* Builds the computation type [stt u#u res pre post]. *)
let mk_stt_comp (u:R.universe) (res pre post:R.term) : Tot R.term =
let t = R.pack_ln (R.Tv_UInst stt_fv [u]) in
let t = R.pack_ln (R.Tv_App t (res, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_atomic_lid = mk_pulse_lib_core_lid "stt_atomic"
let stt_atomic_fv = R.pack_fv stt_atomic_lid
let stt_atomic_tm = R.pack_ln (R.Tv_FVar stt_atomic_fv)
(* Builds the computation type [stt_atomic u#u a inames pre post]. *)
let mk_stt_atomic_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_atomic_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
let stt_ghost_lid = mk_pulse_lib_core_lid "stt_ghost"
let stt_ghost_fv = R.pack_fv stt_ghost_lid
let stt_ghost_tm = R.pack_ln (R.Tv_FVar stt_ghost_fv)
(* Builds the computation type [stt_ghost u#u a inames pre post]. *)
let mk_stt_ghost_comp (u:R.universe) (a inames pre post:R.term) =
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
R.pack_ln (R.Tv_App t (post, R.Q_Explicit))
(* Congruence of [mk_stt_ghost_comp] in its postcondition argument: an
   equivalence [post1 ~ post2] lifts to an equivalence of the two computation
   types.  This relies on [mk_stt_ghost_comp] unfolding definitionally to the
   partial application [t] applied to the postcondition, so [Ctxt_app_arg]
   with [Ctxt_hole] targets exactly that argument position. *)
let mk_stt_ghost_comp_post_equiv (g:R.env) (u:R.universe) (a inames pre post1 post2:R.term)
(posts_equiv:RT.equiv g post1 post2)
: RT.equiv g (mk_stt_ghost_comp u a inames pre post1)
(mk_stt_ghost_comp u a inames pre post2) =
let open R in
let open RT in
let t = R.pack_ln (R.Tv_UInst stt_ghost_fv [u]) in
let t = R.pack_ln (R.Tv_App t (a, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (inames, R.Q_Explicit)) in
let t = R.pack_ln (R.Tv_App t (pre, R.Q_Explicit)) in
Rel_ctxt g post1 post2
(Ctxt_app_arg t Q_Explicit Ctxt_hole)
posts_equiv
(* Total and ghost computation-type constructors. *)
let mk_total t = R.C_Total t
let mk_ghost t = R.C_GTotal t
// Binder construction: default name vs. an explicit pretty-printing name [s]
let binder_of_t_q t q = RT.binder_of_t_q t q
let binder_of_t_q_s (t:R.term) (q:R.aqualv) (s:RT.pp_name_t) = RT.mk_binder s t q
// de Bruijn-indexed bound variable / named variable at index [i]
let bound_var i : R.term = RT.bound_var i
let mk_name i : R.term = R.pack_ln (R.Tv_Var (R.pack_namedv (RT.make_namedv i)))
// Domain of an arrow: argument type paired with its qualifier
let arrow_dom = (R.term & R.aqualv)
(* Builds the arrow [f -> Tot out] with an anonymous binder. *)
let mk_arrow (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q ty q) (R.pack_comp (mk_total out)))
(* Builds [f -> Tot out], naming the binder [s] for pretty printing. *)
let mk_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_total out)))
(* Builds [f -> GTot out], naming the binder [s]. *)
let mk_ghost_arrow_with_name (s:RT.pp_name_t) (f:arrow_dom) (out:R.term) : R.term =
let ty, q = f in
R.pack_ln (R.Tv_Arrow (binder_of_t_q_s ty q s) (R.pack_comp (mk_ghost out)))
(* Lambda abstractions: anonymous, named, and named-with-source-range. *)
let mk_abs ty qual t : R.term = RT.mk_abs ty qual t
let mk_abs_with_name s ty qual t : R.term = R.pack_ln (R.Tv_Abs (binder_of_t_q_s ty qual s) t)
let mk_abs_with_name_and_range s r ty qual t : R.term =
let b = (binder_of_t_q_s ty qual s) in
// attach the source range [r] to the binder for better error reporting
let b = RU.binder_set_range b r in
R.pack_ln (R.Tv_Abs b t)
(* Builds the erased type [erased u#u t]. *)
let mk_erased (u:R.universe) (t:R.term) : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv erased_lid) [u]) in
  R.pack_ln (R.Tv_App head (t, R.Q_Explicit))
(* Builds the term [reveal u#u #t e]. *)
let mk_reveal (u:R.universe) (t:R.term) (e:R.term) : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv reveal_lid) [u]) in
  let app1 = R.pack_ln (R.Tv_App head (t, R.Q_Implicit)) in
  R.pack_ln (R.Tv_App app1 (e, R.Q_Explicit))
let elim_exists_lid = mk_pulse_lib_core_lid "elim_exists"
let intro_exists_lid = mk_pulse_lib_core_lid "intro_exists"
let intro_exists_erased_lid = mk_pulse_lib_core_lid "intro_exists_erased"
(* Builds the vprop [exists_ u#u #a p]. *)
let mk_exists (u:R.universe) (a p:R.term) =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv exists_lid) [u]) in
  let app1 = pack_ln (Tv_App head (a, Q_Implicit)) in
  pack_ln (Tv_App app1 (p, Q_Explicit))
(* Builds the vprop [forall_ u#u #a p]. *)
let mk_forall (u:R.universe) (a p:R.term) =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv forall_lid) [u]) in
  let app1 = pack_ln (Tv_App head (a, Q_Implicit)) in
  pack_ln (Tv_App app1 (p, Q_Explicit))
(* Builds [elim_exists u#u #a p]. *)
let mk_elim_exists (u:R.universe) (a p:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv elim_exists_lid) [u]) in
  let app1 = pack_ln (Tv_App head (a, Q_Implicit)) in
  pack_ln (Tv_App app1 (p, Q_Explicit))
(* Builds [intro_exists u#u #a p e], where [e] is the witness. *)
let mk_intro_exists (u:R.universe) (a p:R.term) (e:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv intro_exists_lid) [u]) in
  let app1 = pack_ln (Tv_App head (a, Q_Implicit)) in
  let app2 = pack_ln (Tv_App app1 (p, Q_Explicit)) in
  pack_ln (Tv_App app2 (e, Q_Explicit))
(* Builds [intro_exists_erased u#u #a p e], where [e] is an erased witness. *)
let mk_intro_exists_erased (u:R.universe) (a p:R.term) (e:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_UInst (pack_fv intro_exists_erased_lid) [u]) in
  let app1 = pack_ln (Tv_App head (a, Q_Implicit)) in
  let app2 = pack_ln (Tv_App app1 (p, Q_Explicit)) in
  pack_ln (Tv_App app2 (e, Q_Explicit))
let while_lid = mk_pulse_lib_core_lid "while_loop"
(* Builds [while_loop inv cond body]. *)
let mk_while (inv cond body:R.term) : R.term =
  let open R in
  let head = pack_ln (Tv_FVar (pack_fv while_lid)) in
  let app1 = pack_ln (Tv_App head (inv, Q_Explicit)) in
  let app2 = pack_ln (Tv_App app1 (cond, Q_Explicit)) in
  pack_ln (Tv_App app2 (body, Q_Explicit))
(* Builds the prop [t1 == t2] at type [vprop], which lives in universe 2. *)
let vprop_eq_tm t1 t2 =
let open R in
let u2 =
pack_universe (Uv_Succ (pack_universe (Uv_Succ (pack_universe Uv_Zero)))) in
let t = pack_ln (Tv_UInst (pack_fv eq2_qn) [u2]) in
let t = pack_ln (Tv_App t (pack_ln (Tv_FVar (pack_fv vprop_lid)), Q_Implicit)) in
let t = pack_ln (Tv_App t (t1, Q_Explicit)) in
let t = pack_ln (Tv_App t (t2, Q_Explicit)) in
t
// The empty set of invariant names
let emp_inames_tm : R.term = R.pack_ln (R.Tv_FVar (R.pack_fv emp_inames_lid))
let non_informative_witness_lid = mk_pulse_lib_core_lid "non_informative_witness"
(* Builds [non_informative_witness u#u a]. *)
let non_informative_witness_rt (u:R.universe) (a:R.term) : R.term =
let open R in
let t = pack_ln (Tv_UInst (pack_fv non_informative_witness_lid) [u]) in
let t = pack_ln (Tv_App t (a, Q_Explicit)) in
t
// Pulse.Lib.Core.vprop_equiv as a reflected head, and its binary application
let stt_vprop_equiv_fv =
R.pack_fv (mk_pulse_lib_core_lid "vprop_equiv")
let stt_vprop_equiv_tm =
R.pack_ln (R.Tv_FVar stt_vprop_equiv_fv)
(* Builds [vprop_equiv t1 t2]. *)
let stt_vprop_equiv (t1 t2:R.term) =
let open R in
let t = pack_ln (Tv_App stt_vprop_equiv_tm (t1, Q_Explicit)) in
pack_ln (Tv_App t (t2, Q_Explicit))
// Return combinators, with and without an equality in the postcondition
let return_stt_lid = mk_pulse_lib_core_lid "return_stt"
let return_stt_noeq_lid = mk_pulse_lib_core_lid "return"
let return_stt_atomic_lid = mk_pulse_lib_core_lid "return_stt_atomic"
let return_stt_atomic_noeq_lid = mk_pulse_lib_core_lid "return_stt_atomic_noeq"
let return_stt_ghost_lid = mk_pulse_lib_core_lid "return_stt_ghost"
let return_stt_ghost_noeq_lid = mk_pulse_lib_core_lid "return_stt_ghost_noeq"
(* The six return builders below apply a [return_*] combinator head to a
   type [ty], a value [t], and a postcondition [post].
   Fix (same in each): the originals rebound the accumulator as [t],
   shadowing the value parameter [t]; since lets are non-recursive, the
   later [(t, Q_Explicit)] argument referred to the partially applied head
   itself, so the returned value was dropped and the head was applied to
   itself.  Using a distinct accumulator name restores the intended term. *)

(* Builds [return_stt u#u #ty t post]. *)
let mk_stt_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
  : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_lid) [u]) in
  let head = R.pack_ln (R.Tv_App head (ty, R.Q_Implicit)) in
  let head = R.pack_ln (R.Tv_App head (t, R.Q_Explicit)) in
  R.pack_ln (R.Tv_App head (post, R.Q_Explicit))

(* Builds [return u#u #ty t post]. *)
let mk_stt_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
  : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_noeq_lid) [u]) in
  let head = R.pack_ln (R.Tv_App head (ty, R.Q_Implicit)) in
  let head = R.pack_ln (R.Tv_App head (t, R.Q_Explicit)) in
  R.pack_ln (R.Tv_App head (post, R.Q_Explicit))

(* Builds [return_stt_atomic u#u #ty t post]. *)
let mk_stt_atomic_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
  : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_lid) [u]) in
  let head = R.pack_ln (R.Tv_App head (ty, R.Q_Implicit)) in
  let head = R.pack_ln (R.Tv_App head (t, R.Q_Explicit)) in
  R.pack_ln (R.Tv_App head (post, R.Q_Explicit))

(* Builds [return_stt_atomic_noeq u#u #ty t post]. *)
let mk_stt_atomic_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
  : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_atomic_noeq_lid) [u]) in
  let head = R.pack_ln (R.Tv_App head (ty, R.Q_Implicit)) in
  let head = R.pack_ln (R.Tv_App head (t, R.Q_Explicit)) in
  R.pack_ln (R.Tv_App head (post, R.Q_Explicit))

(* Builds [return_stt_ghost u#u #ty t post]. *)
let mk_stt_ghost_return (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
  : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_lid) [u]) in
  let head = R.pack_ln (R.Tv_App head (ty, R.Q_Implicit)) in
  let head = R.pack_ln (R.Tv_App head (t, R.Q_Explicit)) in
  R.pack_ln (R.Tv_App head (post, R.Q_Explicit))

(* Builds [return_stt_ghost_noeq u#u #ty t post]. *)
let mk_stt_ghost_return_noeq (u:R.universe) (ty:R.term) (t:R.term) (post:R.term)
  : R.term =
  let head = R.pack_ln (R.Tv_UInst (R.pack_fv return_stt_ghost_noeq_lid) [u]) in
  let head = R.pack_ln (R.Tv_App head (ty, R.Q_Implicit)) in
  let head = R.pack_ln (R.Tv_App head (t, R.Q_Explicit)) in
  R.pack_ln (R.Tv_App head (post, R.Q_Explicit))
// Wrapper.lift_stt_atomic<u> #a #pre #post e
(* Lifts an atomic computation [e] into the general [stt] effect. *)
let mk_lift_atomic_stt (u:R.universe) (a pre post e:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_atomic" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
pack_ln (R.Tv_App t (e, Q_Explicit))
// Wrapper.lift_stt_ghost<u> #a #opened #pre #post e reveal_a
(* Lifts a ghost computation [e] into atomic; [reveal_a] is passed as the
   final explicit argument (presumably the reveal function for [a] — see
   Wrapper.lift_stt_ghost for its exact role). *)
let mk_lift_ghost_atomic (u:R.universe) (a opened pre post e reveal_a:R.term) =
let open R in
let lid = mk_pulse_lib_core_lid "lift_stt_ghost" in
let t = pack_ln (R.Tv_UInst (R.pack_fv lid) [u]) in
let t = pack_ln (R.Tv_App t (a, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (opened, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (pre, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (post, Q_Implicit)) in
let t = pack_ln (R.Tv_App t (e, Q_Explicit)) in
pack_ln (R.Tv_App t (reveal_a, Q_Explicit))
// Wrapper.bind_stt<u1, u2> #a #b #pre1 #post1 #post2 e1 e2
(* Sequential composition of two [stt] computations.
   Idiom fix: [R.mk_app] left-folds [Tv_App] over its argument list, so the
   seven nested single-argument [mk_app] calls collapse into one call with
   the full argument list, producing the identical reflected term. *)
let mk_bind_stt
  (u1 u2:R.universe)
  (ty1 ty2:R.term)
  (pre1 post1: R.term)
  (post2: R.term)
  (t1 t2:R.term)
  : R.term
  = let bind_lid = mk_pulse_lib_core_lid "bind_stt" in
    let head = R.pack_ln (R.Tv_UInst (R.pack_fv bind_lid) [u1;u2]) in
    R.mk_app head [
      (ty1,   R.Q_Implicit);
      (ty2,   R.Q_Implicit);
      (pre1,  R.Q_Implicit);
      (post1, R.Q_Implicit);
      (post2, R.Q_Implicit);
      (t1,    R.Q_Explicit);
      (t2,    R.Q_Explicit)
    ]
// Wrapper.bind_sttg<u1, u2> #a #b #opened #pre1 #post1 #post2 e1 e2
let mk_bind_ghost
(u1 u2:R.universe) | false | true | Pulse.Reflection.Util.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_bind_ghost : u1: FStar.Reflection.Types.universe ->
u2: FStar.Reflection.Types.universe ->
a: FStar.Reflection.Types.term ->
b: FStar.Reflection.Types.term ->
opened: FStar.Reflection.Types.term ->
pre1: FStar.Reflection.Types.term ->
post1: FStar.Reflection.Types.term ->
post2: FStar.Reflection.Types.term ->
e1: FStar.Reflection.Types.term ->
e2: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | [] | Pulse.Reflection.Util.mk_bind_ghost | {
"file_name": "lib/steel/pulse/Pulse.Reflection.Util.fst",
"git_rev": "7fbb54e94dd4f48ff7cb867d3bae6889a635541e",
"git_url": "https://github.com/FStarLang/steel.git",
"project_name": "steel"
} |
u1: FStar.Reflection.Types.universe ->
u2: FStar.Reflection.Types.universe ->
a: FStar.Reflection.Types.term ->
b: FStar.Reflection.Types.term ->
opened: FStar.Reflection.Types.term ->
pre1: FStar.Reflection.Types.term ->
post1: FStar.Reflection.Types.term ->
post2: FStar.Reflection.Types.term ->
e1: FStar.Reflection.Types.term ->
e2: FStar.Reflection.Types.term
-> FStar.Reflection.Types.term | {
"end_col": 39,
"end_line": 393,
"start_col": 2,
"start_line": 383
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.